diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 6d064ddb9b..26306af66f 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,5 +13,5 @@
 # limitations under the License.
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407
-# created: 2024-07-31T14:52:44.926548819Z
+  digest: sha256:8e3e7e18255c22d1489258d0374c901c01f9c4fd77a12088670cd73d580aa737
+# created: 2024-12-17T00:59:58.625514486Z
diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml
index d4ca94189e..21ed4182c8 100644
--- a/.github/release-trigger.yml
+++ b/.github/release-trigger.yml
@@ -1 +1,2 @@
 enabled: true
+multiScmName: gapic-generator-python
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index e04213881e..d5581bc38c 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -12,10 +12,13 @@ branchProtectionRules:
     - 'mypy (3.10)'
     - 'mypy (3.11)'
    - 'mypy (3.12)'
+    - 'mypy (3.13)'
     - 'showcase (3.7, showcase)'
-    - 'showcase (3.12, showcase)'
+    - 'showcase (3.13, showcase)'
     - 'showcase (3.7, showcase_alternative_templates)'
-    - 'showcase (3.12, showcase_alternative_templates)'
+    - 'showcase (3.13, showcase_alternative_templates)'
+    - 'showcase (3.7, showcase_w_rest_async)'
+    - 'showcase (3.13, showcase_w_rest_async)'
     # TODO(dovs): reenable these when the mtls tests have been debugged and fixed
     # See #1218 for details
     # - 'showcase-mtls (showcase_mtls)'
@@ -28,28 +31,40 @@
     - 'showcase-unit (3.10)'
     - 'showcase-unit (3.11)'
     - 'showcase-unit (3.12)'
+    - 'showcase-unit (3.13)'
     - 'showcase-unit (3.7, _alternative_templates)'
     - 'showcase-unit (3.8, _alternative_templates)'
     - 'showcase-unit (3.9, _alternative_templates)'
     - 'showcase-unit (3.10, _alternative_templates)'
     - 'showcase-unit (3.11, _alternative_templates)'
     - 'showcase-unit (3.12, _alternative_templates)'
+    - 'showcase-unit (3.13, _alternative_templates)'
     - 'showcase-unit (3.7, _alternative_templates_mixins)'
     - 'showcase-unit (3.8, _alternative_templates_mixins)'
     - 'showcase-unit (3.9, _alternative_templates_mixins)'
     - 'showcase-unit (3.10, _alternative_templates_mixins)'
     - 'showcase-unit (3.11, _alternative_templates_mixins)'
     - 'showcase-unit (3.12, _alternative_templates_mixins)'
+    - 'showcase-unit (3.13, _alternative_templates_mixins)'
     - 'showcase-unit (3.7, _mixins)'
     - 'showcase-unit (3.8, _mixins)'
     - 'showcase-unit (3.9, _mixins)'
     - 'showcase-unit (3.10, _mixins)'
     - 'showcase-unit (3.11, _mixins)'
     - 'showcase-unit (3.12, _mixins)'
+    - 'showcase-unit (3.13, _mixins)'
+    - 'showcase-unit (3.7, _w_rest_async)'
+    - 'showcase-unit (3.8, _w_rest_async)'
+    - 'showcase-unit (3.9, _w_rest_async)'
+    - 'showcase-unit (3.10, _w_rest_async)'
+    - 'showcase-unit (3.11, _w_rest_async)'
+    - 'showcase-unit (3.12, _w_rest_async)'
+    - 'showcase-unit (3.13, _w_rest_async)'
     - 'showcase-unit-add-iam-methods'
     - 'integration'
     - 'goldens-lint'
     - 'goldens-prerelease'
+    - 'goldens-unit'
     - 'style-check'
     - 'snippetgen'
     - 'unit (3.7)'
@@ -58,18 +73,21 @@
     - 'unit (3.10)'
     - 'unit (3.11)'
     - 'unit (3.12)'
+    - 'unit (3.13)'
     - 'fragment (3.7)'
     - 'fragment (3.8)'
     - 'fragment (3.9)'
     - 'fragment (3.10)'
     - 'fragment (3.11)'
     - 'fragment (3.12)'
+    - 'fragment (3.13)'
     - 'fragment (3.7, _alternative_templates)'
     - 'fragment (3.8, _alternative_templates)'
     - 'fragment (3.9, _alternative_templates)'
     - 'fragment (3.10, _alternative_templates)'
     - 'fragment (3.11, _alternative_templates)'
     - 'fragment (3.12, _alternative_templates)'
+    - 'fragment (3.13, _alternative_templates)'
     - 'OwlBot Post Processor'
   requiredApprovingReviewCount: 1
   requiresCodeOwnerReviews: true
diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml
index 4c00a2bb32..a565a33660 100644
--- a/.github/workflows/tests.yaml
+++ b/.github/workflows/tests.yaml
@@ -38,7 +38,7 @@ jobs:
       matrix:
         # Run mypy on all of the supported python versions listed in setup.py
        # https://github.com/python/mypy/blob/master/setup.py
-        python: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+        python: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
@@ -55,9 +55,14 @@
     strategy:
       # Run showcase tests on the lowest and highest supported runtimes
       matrix:
-        python: ["3.7", "3.12"]
-        target: [showcase, showcase_alternative_templates]
-    runs-on: ubuntu-latest
+        # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `showcase_w_rest_async` target when async rest is GA.
+        python: ["3.7", "3.13"]
+        target: [showcase, showcase_alternative_templates, showcase_w_rest_async]
+        logging_scope: ["", "google"]
+
+    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed.
+    # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix
+    runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
       - name: Set up Python "${{ matrix.python }}"
@@ -89,6 +94,9 @@
           unzip protoc-${PROTOC_VERSION}.zip
           sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc
       - name: Run showcase tests.
+        env:
+          # TODO(https://github.com/googleapis/gapic-generator-python/issues/2286): Construct nox sessions with logging enabled.
+          GOOGLE_SDK_PYTHON_LOGGING_SCOPE: ${{ matrix.logging_scope }}
         run: nox -s ${{ matrix.target }}-${{ matrix.python }}
   showcase-mtls:
     if: ${{ false }} # TODO(dovs): reenable when #1218 is fixed
@@ -103,10 +111,10 @@
         run: |
           sudo mkdir -p /tmp/workspace/tests/cert/
           sudo chown -R ${USER} /tmp/workspace/
-      - name: Set up Python "3.12"
+      - name: Set up Python "3.13"
         uses: actions/setup-python@v5
         with:
-          python-version: "3.12"
+          python-version: "3.13"
           cache: 'pip'
       - name: Copy mtls files
         run: cp tests/cert/mtls.* /tmp/workspace/tests/cert/
@@ -139,10 +147,14 @@
   showcase-unit:
     strategy:
       matrix:
-        python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
+        python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
         # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `_w_rest_async` variant when async rest is GA.
         variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins, _w_rest_async]
-    runs-on: ubuntu-latest
+        logging_scope: ["", "google"]
+
+    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed.
+    # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix
+    runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
       - name: Set up Python "${{ matrix.python }}"
@@ -165,15 +177,18 @@
       - name: Install nox.
         run: python -m pip install nox
       - name: Run unit tests.
+        env:
+          # TODO(https://github.com/googleapis/gapic-generator-python/issues/2286): Construct nox sessions with logging enabled.
+          GOOGLE_SDK_PYTHON_LOGGING_SCOPE: ${{ matrix.logging_scope }}
         run: nox -s showcase_unit${{ matrix.variant }}-${{ matrix.python }}
   showcase-unit-add-iam-methods:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - name: Set up Python "3.12"
+      - name: Set up Python "3.13"
         uses: actions/setup-python@v5
         with:
-          python-version: "3.12"
+          python-version: "3.13"
           cache: 'pip'
       - name: Install system dependencies.
         run: |
@@ -198,10 +213,10 @@
         variant: ['', _alternative_templates]
     steps:
       - uses: actions/checkout@v4
-      - name: Set up Python "3.12"
+      - name: Set up Python "3.13"
         uses: actions/setup-python@v5
         with:
-          python-version: "3.12"
+          python-version: "3.13"
           cache: 'pip'
       - name: Install system dependencies.
         run: |
@@ -223,10 +238,10 @@
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - name: Set up Python "3.12"
+      - name: Set up Python "3.13"
         uses: actions/setup-python@v5
         with:
-          python-version: "3.12"
+          python-version: "3.13"
           cache: 'pip'
       - name: Install system dependencies.
         run: |
@@ -239,8 +254,11 @@
   unit:
     strategy:
       matrix:
-        python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
-    runs-on: ubuntu-latest
+        python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
+
+    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed.
+    # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix
+    runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python }}
@@ -260,9 +278,12 @@
   fragment:
     strategy:
      matrix:
-        python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
+        python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
         variant: ['', _alternative_templates]
-    runs-on: ubuntu-latest
+
+    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed.
+    # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix
+    runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python }}
@@ -312,29 +333,50 @@
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - name: Set up Python 3.12
+      - name: Set up Python 3.13
         uses: actions/setup-python@v5
         with:
-          python-version: "3.12"
+          python-version: "3.13"
           cache: 'pip'
       - name: Install nox.
         run: |
           python -m pip install nox
       - name: Run blacken and lint on the generated output.
         run: |
-          nox -f tests/integration/goldens/asset/noxfile.py -s mypy-3.12 blacken lint
-          nox -f tests/integration/goldens/credentials/noxfile.py -s mypy-3.12 blacken lint
-          nox -f tests/integration/goldens/eventarc/noxfile.py -s mypy-3.12 blacken lint
-          nox -f tests/integration/goldens/logging/noxfile.py -s mypy-3.12 blacken lint
-          nox -f tests/integration/goldens/redis/noxfile.py -s mypy-3.12 blacken lint
+          nox -f tests/integration/goldens/asset/noxfile.py -s mypy-3.13 blacken lint
+          nox -f tests/integration/goldens/credentials/noxfile.py -s mypy-3.13 blacken lint
+          nox -f tests/integration/goldens/eventarc/noxfile.py -s mypy-3.13 blacken lint
+          nox -f tests/integration/goldens/logging/noxfile.py -s mypy-3.13 blacken lint
+          nox -f tests/integration/goldens/redis/noxfile.py -s mypy-3.13 blacken lint
+  goldens-unit:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python 3.13
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.13"
+          cache: 'pip'
+      - name: Install nox.
+        run: |
+          python -m pip install nox
+      - name: Run the `unit` nox session
+        # Exclude testing for asset which requires dependency google-cloud-org-policy
+        # in order to run unit tests
+        # See https://github.com/googleapis/gapic-generator-python/issues/1806
+        run: |
+          nox -f tests/integration/goldens/credentials/noxfile.py -s unit-3.13
+          nox -f tests/integration/goldens/eventarc/noxfile.py -s unit-3.13
+          nox -f tests/integration/goldens/logging/noxfile.py -s unit-3.13
+          nox -f tests/integration/goldens/redis/noxfile.py -s unit-3.13
   goldens-prerelease:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - name: Set up Python 3.12
+      - name: Set up Python 3.13
         uses: actions/setup-python@v5
         with:
-          python-version: "3.12"
+          python-version: "3.13"
           cache: 'pip'
       - name: Install nox.
         run: |
@@ -352,12 +394,10 @@
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - name: Set up Python "3.11"
+      - name: Set up Python "3.13"
         uses: actions/setup-python@v5
         with:
-          # Do not upgrade this check to python 3.12 until
-          # https://github.com/hhatto/autopep8/issues/712 is fixed
-          python-version: "3.11"
+          python-version: "3.13"
           cache: 'pip'
       - name: Install autopep8
         run: |
diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt
index 7129c77155..f99a5c4aac 100644
--- a/.kokoro/docker/docs/requirements.txt
+++ b/.kokoro/docker/docs/requirements.txt
@@ -1,42 +1,72 @@
 #
-# This file is autogenerated by pip-compile with Python 3.9
+# This file is autogenerated by pip-compile with Python 3.10
 # by the following command:
 #
-#    pip-compile --allow-unsafe --generate-hashes requirements.in
+#    pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in
 #
-argcomplete==3.4.0 \
-    --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \
-    --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f
+argcomplete==3.5.2 \
+    --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \
+    --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb
     # via nox
-colorlog==6.8.2 \
-    --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \
-    --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33
+colorlog==6.9.0 \
+    --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \
+    --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2
     # via nox
-distlib==0.3.8 \
-    --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \
-    --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64
+distlib==0.3.9 \
+    --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \
+    --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403
     # via virtualenv
-filelock==3.15.4 \
-    --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \
-    --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7
+filelock==3.16.1 \
+    --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \
+    --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435
     # via virtualenv
-nox==2024.4.15 \
-    --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \
-    --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f
-    # via -r requirements.in
-packaging==24.1 \
-    --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
-    --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
+nox==2024.10.9 \
+    --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \
+    --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95
+    # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in
+packaging==24.2 \
+    --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \
+    --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f
     # via nox
-platformdirs==4.2.2 \
-    --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \
-    --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3
+platformdirs==4.3.6 \
+    --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \
+    --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb
     # via virtualenv
-tomli==2.0.1 \
-    --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
-    --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
+tomli==2.2.1 \
+    --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \
+    --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \
+    --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \
+    --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \
+    --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \
+    --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \
+    --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \
+    --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \
+    --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \
+    --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \
+    --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \
+    --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \
+    --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \
+    --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \
+    --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \
+    --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \
+    --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \
+    --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \
+    --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \
+    --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \
+    --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \
+    --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \
+    --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \
+    --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \
+    --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \
+    --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \
+    --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \
+    --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \
+    --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \
+    --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \
+    --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \
+    --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7
     # via nox
-virtualenv==20.26.3 \
-    --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \
-    --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589
+virtualenv==20.28.0 \
+    --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \
+    --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa
     # via nox
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index dcab742e08..d3e42b1bb7 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -30,9 +30,9 @@ env_vars: {
 
 env_vars: {
   key: "V2_STAGING_BUCKET"
-  # Push non-cloud library docs to `docs-staging-v2-staging` instead of the
+  # Push non-cloud library docs to `docs-staging-v2-dev` instead of the
   # Cloud RAD bucket `docs-staging-v2`
-  value: "docs-staging-v2-staging"
+  value: "docs-staging-v2-dev"
 }
 
 # It will upload the docker image after successful builds.
@@ -64,4 +64,4 @@ before_action {
       keyname: "docuploader_service_account"
     }
   }
-}
\ No newline at end of file
+}
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index b9f654279f..1d09c988b4 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -23,7 +23,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /
 export PYTHONUNBUFFERED=1
 
 # Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1")
+TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-3")
 cd github/gapic-generator-python
 python3 setup.py sdist bdist_wheel
 twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index 1f5dced3c1..68cddd5f08 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -28,17 +28,11 @@ before_action {
   fetch_keystore {
     keystore_resource {
       keystore_config_id: 73713
-      keyname: "google-cloud-pypi-token-keystore-1"
+      keyname: "google-cloud-pypi-token-keystore-3"
     }
   }
 }
 
-# Tokens needed to report release status back to GitHub
-env_vars: {
-  key: "SECRET_MANAGER_KEYS"
-  value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
-}
-
 # Store the packages we uploaded to PyPI. That way, we have a record of exactly
 # what we published, which we can use to generate SBOMs and attestations.
 action {
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
index 9622baf0ba..006d8ef931 100644
--- a/.kokoro/requirements.txt
+++ b/.kokoro/requirements.txt
@@ -4,79 +4,94 @@
 #
 #    pip-compile --allow-unsafe --generate-hashes requirements.in
 #
-argcomplete==3.4.0 \
-    --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \
-    --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f
+argcomplete==3.5.1 \
+    --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \
+    --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4
     # via nox
-attrs==23.2.0 \
-    --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \
-    --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1
+attrs==24.2.0 \
+    --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \
+    --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2
     # via gcp-releasetool
 backports-tarfile==1.2.0 \
     --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \
     --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991
     # via jaraco-context
-cachetools==5.3.3 \
-    --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \
-    --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105
+cachetools==5.5.0 \
+    --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \
+    --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a
     # via google-auth
-certifi==2024.7.4 \
-    --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \
-    --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90
+certifi==2024.8.30 \
+    --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \
+    --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9
     # via requests
-cffi==1.16.0 \
-    --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \
-    --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \
-    --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \
-    --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \
-    --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \
-    --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \
-    --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \
-    --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \
-    --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \
-    --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \
-    --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \
-    --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \
-    --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \
-    --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \
-    --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \
-    --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \
-    --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \
-    --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \
-    --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \
-    --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \
-    --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \
-    --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \
-    --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \
-    --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \
-    --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \
-    --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \
-    --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \
-    --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \
-    --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \
-    --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \
-    --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \
-    --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \
-    --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \
-    --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \
-    --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \
-    --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \
-    --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \
-    --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \
-    --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \
-    --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \
-    --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \
-    --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \
-    --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \
-    --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \
-    --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \
-    --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \
-    --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \
-    --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \
-    --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \
-    --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \
-    --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \
-    --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357
+cffi==1.17.1 \
+    --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \
+    --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \
+    --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \
+    --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \
+    --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \
+    --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \
+    --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \
+    --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \
+    --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \
+    --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \
+    --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \
+    --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \
+    --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \
+    --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \
+    --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \
+    --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \
+    --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \
+    --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \
+    --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \
+    --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \
+    --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \
+    --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \
+    --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \
+    --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \
+    --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \
+    --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \
+    --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \
+    --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \
+    --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \
+    --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \
+    --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \
+    --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \
+    --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \
+    --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \
+    --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \
+    --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \
+    --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \
+    --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \
+    --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \
+    --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \
+    --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \
+    --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \
+    --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \
+    --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \
+    --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \
+    --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \
+    --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \
+    --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \
+    --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \
+    --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \
+    --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \
+    --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \
+    --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \
+    --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \
+    --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \
+    --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \
+    --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \
+    --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \
+    --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \
+    --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \
+    --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \
+    --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \
+    --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \
+    --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \
+    --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \
+    --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \
+    --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b
     # via cryptography
 charset-normalizer==2.1.1 \
     --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \
@@ -97,72 +112,67 @@ colorlog==6.8.2 \
     --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \
     --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33
     # via
     #   gcp-docuploader
     #   nox
-cryptography==42.0.8 \
-    --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \
-    --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \
-    --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \
-    --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \
-    --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \
-    --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \
-    --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \
-    --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \
-    --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \
-    --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \
-    --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \
-    --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \
-    --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \
-    --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \
-    --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \
-    --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \
-    --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \
-    --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \
-    --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \
-    --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \
-    --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \
-    --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \
-    --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \
-    --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \
-    --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \
-    --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \
-    --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \
-    --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \
-    --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \
-    --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \
-    --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \
-    --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e
+cryptography==43.0.1 \
+    --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \
+    --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \
+    --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \
+    --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \
+    --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \
+    --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \
+    --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \
+    --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \
+    --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \
+    --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \
+    --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \
+    --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \
+    --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \
+    --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \
+    --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \
+    --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \
+    --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \
+    --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \
+    --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \
+    --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \
+    --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \
+    --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \
+    --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \
+    --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \
+    --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \
+    --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \
+    --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289
     # via
     #   -r requirements.in
     #   gcp-releasetool
     #   secretstorage
-distlib==0.3.8 \
-    --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \
-    --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64
+distlib==0.3.9 \
+    --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \
+    --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403
     # via virtualenv
 docutils==0.21.2 \
     --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \
     --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2
     # via readme-renderer
-filelock==3.15.4 \
-    --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \
-    --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7
+filelock==3.16.1 \
+    --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \
+    --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435
     # via virtualenv
 gcp-docuploader==0.6.5 \
     --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \
     --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea
     # via -r requirements.in
-gcp-releasetool==2.0.1 \
-    --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \
-    --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62
+gcp-releasetool==2.1.1 \
+    --hash=sha256:25639269f4eae510094f9dbed9894977e1966933211eb155a451deebc3fc0b30 \
+    --hash=sha256:845f4ded3d9bfe8cc7fdaad789e83f4ea014affa77785259a7ddac4b243e099e
     # via -r requirements.in
-google-api-core==2.19.1 \
-    --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \
-    --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd
+google-api-core==2.21.0 \
+    --hash=sha256:4a152fd11a9f774ea606388d423b68aa7e6d6a0ffe4c8266f74979613ec09f81 \
+    --hash=sha256:6869eacb2a37720380ba5898312af79a4d30b8bca1548fb4093e0697dc4bdf5d
     # via
     #   google-cloud-core
     #   google-cloud-storage
-google-auth==2.31.0 \
-    --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \
-    --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871
+google-auth==2.35.0 \
+    --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \
+    --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a
     # via
     #   gcp-releasetool
     #   google-api-core
@@ -172,97 +182,56 @@ google-cloud-core==2.4.1 \
     --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \
     --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61
     # via google-cloud-storage
-google-cloud-storage==2.17.0 \
-    --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \
-    --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1
+google-cloud-storage==2.18.2 \
+    --hash=sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 \
+    --hash=sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99
     # via gcp-docuploader
-google-crc32c==1.5.0 \
-    --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \
-    --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \
-    --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \
-    --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \
-    --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \
-    --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \
-    --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \
-    --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \
-    --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \
-    --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \
-    --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \
-    --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \
-    --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \
-    --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \
-    --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \
-    --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \
-    --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \
-    --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \
-    --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \
-    --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \
-    --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \
-    --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \
-    --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \
-    --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \
-    --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \
-    --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \
-    --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \
-    --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \
-    --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \
-    --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \
-    --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \
-    --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \
-    --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \
-    --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \
-    --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \
-    --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \
-    --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \
-    --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \
-    --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \
-    --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \
-    --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \
-    --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \
-    --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \
-    --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \
-    --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \
-    --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \
-    --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \
-    --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \
-    --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \
-    --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \
-    --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \
-    --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \
-    --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \
-    --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \
-    --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \
-    --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \
-    --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \
-    --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \
-    --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \
-    --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \
-    --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \
-    --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \
-    --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \
-    --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \
-    --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \
-    --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \
-    --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \
-    --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4
+google-crc32c==1.6.0 \
+    --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \
+    --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \
+    --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \
+    --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \
+    --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \
+    --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \
+    --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \
+    --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \
+    --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \
+    --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \
+    --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \
+    --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \
+    --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \
+    --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \
+    --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \
+    --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \
+    --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \
+    --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \
+    --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \
+    --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \
+    --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \
+    --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \
+    --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \
+    --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \
+    --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \
+    --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \
+    --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4
     # via
     #   google-cloud-storage
     #   google-resumable-media
-google-resumable-media==2.7.1 \
-    --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \
-    --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33
+google-resumable-media==2.7.2 \
+    --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \
+    --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0
     # via google-cloud-storage
-googleapis-common-protos==1.63.2 \
-    --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \
-    --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87
+googleapis-common-protos==1.65.0 \
+    --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \
+    --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0
     # via google-api-core
-idna==3.7 \
-    --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \
-    --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0
+idna==3.10 \
+    --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
+    --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
     # via requests
-importlib-metadata==8.0.0 \
-    --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \
-    --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812
+importlib-metadata==8.5.0 \
+    --hash=sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b \
+    --hash=sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7
     # via
     #   -r requirements.in
     #   keyring
@@ -271,13 +240,13 @@ jaraco-classes==3.4.0 \
     --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \
     --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790
     # via keyring
-jaraco-context==5.3.0 \
-    --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \
-    --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2
+jaraco-context==6.0.1 \
+    --hash=sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3 \
+    --hash=sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4
     # via keyring
-jaraco-functools==4.0.1 \
-    --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \
-    --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8
+jaraco-functools==4.1.0 \
+    --hash=sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d \
+    --hash=sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649
     # via keyring
 jeepney==0.8.0 \
     --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \
@@ -289,9 +258,9 @@ jinja2==3.1.4 \
     --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
     --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
     # via gcp-releasetool
-keyring==25.2.1 \
-    --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \
-    --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b
+keyring==25.4.1 \
+    --hash=sha256:5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf \
+    --hash=sha256:b07ebc55f3e8ed86ac81dd31ef14e81ace9dd9c3d4b5d77a6e9a2016d0d71a1b
     # via
     #   gcp-releasetool
     #   twine
@@ -299,75 +268,76 @@ markdown-it-py==3.0.0 \
     --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \
     --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb
     # via rich
-markupsafe==2.1.5 \
-    --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \
-    --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \
-    --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \
-    --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \
-    --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \
-    --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \
-    --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \
-    --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \
-    --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \
-    --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \
-    --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \
-    --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \
-    --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \
-    --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \
-    --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \
-    --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \
-    --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \
-    --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \
-    --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \
-    --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \
-    --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \
-    --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \
-    --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \
-    --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \
-    --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \
-    --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \
-    --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \
-    --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \
-    --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \
-    --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \
-    --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \
-    --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \
-    --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \
-    --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \
-    --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \
-    --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \
-    --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \
-    --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \
-    --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \
-    --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \
-    --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \
-    --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \
-    --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \
-    --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \
-    --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \
-    --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \
-    --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \
-    --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \
-    --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \
-    --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \
-    --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \
-    --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \
-    --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \
-    --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \
-    --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \
-    --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \
-    --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \
-    --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \
-    --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \
-    --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68
+markupsafe==3.0.1 \
+    --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \
+    --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \
+    --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \
+    --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \
+    --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \
+    --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \
+    --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \
+    --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \
+    --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \
+    --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \
+    --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \
+    --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \
+    --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \
+    --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \
+    --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \
+    --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \
+    --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \
+    --hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \
+    --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \
+    --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \
+    --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \
+    --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \
+    --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \
+    --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \
+    --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \
+    --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \
+    --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \
+    --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \
+    --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \
+    --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \
+    --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \
+    --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \
+    --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \
+    --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \
+    --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \
+    --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \
+    --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \
+    --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \
+    --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \
+    --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \
+    --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \
+    --hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \
+    --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \
+    --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \
+    --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \
+    --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \
+    --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \
+    --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \
+    --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \
+    --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \
+    --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \
+    --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \
+    --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \
+    --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \
+    --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \
+    --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \
+    --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \
+    --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \
+    --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \
+    --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \
+    --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f
     # via jinja2
 mdurl==0.1.2 \
     --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \
     --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba
     # via markdown-it-py
-more-itertools==10.3.0 \
-    --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \
-    --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320
+more-itertools==10.5.0 \
+    --hash=sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef \
+    --hash=sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6
     # via
     #   jaraco-classes
     #   jaraco-functools
@@ -389,9 +359,9 @@ nh3==0.2.18 \
     --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \
     --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe
     # via readme-renderer
-nox==2024.4.15 \
-    --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \
-    --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f
+nox==2024.10.9 \
+    --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \
+    --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95
     # via -r requirements.in
 packaging==24.1 \
     --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
@@ -403,41 +373,41 @@ pkginfo==1.10.0 \
     --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \
     --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097
     # via twine
-platformdirs==4.2.2 \
-    --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \
-    --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3
+platformdirs==4.3.6 \
+    --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \
+    --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb
     # via virtualenv
 proto-plus==1.24.0 \
     --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \
     --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12
     # via google-api-core
-protobuf==5.27.2 \
-    --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \
-    --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \
-    --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \
-    --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \
-    --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \
-    --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \
-    --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \
-    --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \
-    --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \
-    --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \
-    --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714
+protobuf==5.28.2 \
+    --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \
+    --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \
+    --hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \
+    --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \
+    --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \
+    --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \
+    --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \
+    --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \
+    --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \
+    --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \
--hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos # proto-plus -pyasn1==0.6.0 \ - --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ - --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 \ - --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ - --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c # via google-auth pycparser==2.22 \ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ @@ -449,9 +419,9 @@ pygments==2.18.0 \ # via # readme-renderer # rich -pyjwt==2.8.0 \ - --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ - --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 +pyjwt==2.9.0 \ + --hash=sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 \ + --hash=sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c # via gcp-releasetool pyperclip==1.9.0 \ --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 @@ -481,9 +451,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.7.1 \ - --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ - --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 +rich==13.9.2 \ + --hash=sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c \ + --hash=sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -499,9 +469,9 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +tomli==2.0.2 \ + --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ + --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via nox twine==5.1.1 \ --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ @@ -510,28 +480,30 @@ twine==5.1.1 \ typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via -r requirements.in -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 + # via + # -r requirements.in + # rich +urllib3==2.2.3 \ + --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ + --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via # requests # twine -virtualenv==20.26.3 \ - 
--hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 +virtualenv==20.26.6 \ + --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ + --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 # via nox -wheel==0.43.0 \ - --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ - --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 +wheel==0.44.0 \ + --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ + --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 # via -r requirements.in -zipp==3.19.2 \ - --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ - --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c +zipp==3.20.2 \ + --hash=sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 \ + --hash=sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==70.2.0 \ - --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ - --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 +setuptools==75.1.0 \ + --hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ + --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 # via -r requirements.in diff --git a/CHANGELOG.md b/CHANGELOG.md index 3e62651ae5..8f25cb72b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,75 @@ # Changelog +## [1.21.0](https://github.com/googleapis/gapic-generator-python/compare/v1.20.2...v1.21.0) (2024-12-11) + + +### Features + +* Add client debug logging support for async gRPC ([#2291](https://github.com/googleapis/gapic-generator-python/issues/2291)) ([f45935a](https://github.com/googleapis/gapic-generator-python/commit/f45935a4d760a36bf989ed79bfd02aa7ec203468)) +* Add client logging support for sync gRPC ([#2284](https://github.com/googleapis/gapic-generator-python/issues/2284)) ([dddf797](https://github.com/googleapis/gapic-generator-python/commit/dddf797a1ec7bf0496b4b4c75f8d37faa753c824)) +* Add debug log when creating client ([#2265](https://github.com/googleapis/gapic-generator-python/issues/2265)) ([8be95a2](https://github.com/googleapis/gapic-generator-python/commit/8be95a2f4749a2882117154aa655c0a9d71cdc50)) +* Add debug log when sending requests via REST ([#2270](https://github.com/googleapis/gapic-generator-python/issues/2270)) ([4cb1fa2](https://github.com/googleapis/gapic-generator-python/commit/4cb1fa2452ad5ba59b34c9d25cb3ca0c635059ac)) + + +### Bug Fixes + +* Fix typing issue with gRPC metadata when key ends in -bin ([#2251](https://github.com/googleapis/gapic-generator-python/issues/2251)) ([8b3b80f](https://github.com/googleapis/gapic-generator-python/commit/8b3b80f4b55c295e5d13084284ff0e2a72b2e993)) +* **log:** Preserve dict of rest async response headers ([#2288](https://github.com/googleapis/gapic-generator-python/issues/2288)) ([b10cc21](https://github.com/googleapis/gapic-generator-python/commit/b10cc21daf7d17567119f5c9b33d98fe18072eb4)) + +## [1.20.2](https://github.com/googleapis/gapic-generator-python/compare/v1.20.1...v1.20.2) (2024-10-30) + + +### Bug 
Fixes + +* Disable universe-domain validation ([#2236](https://github.com/googleapis/gapic-generator-python/issues/2236)) ([ecaa41e](https://github.com/googleapis/gapic-generator-python/commit/ecaa41e7984a8aa2244138cce99cb91a87872c54)) + +## [1.20.1](https://github.com/googleapis/gapic-generator-python/compare/v1.20.0...v1.20.1) (2024-10-25) + + +### Bug Fixes + +* Allow google-cloud-documentai 3.x ([#2237](https://github.com/googleapis/gapic-generator-python/issues/2237)) ([946adf1](https://github.com/googleapis/gapic-generator-python/commit/946adf16d8a1cf83019eaa9b6a9e8b1baf95159d)) + +## [1.20.0](https://github.com/googleapis/gapic-generator-python/compare/v1.19.1...v1.20.0) (2024-10-23) + + +### Features + +* Add support for Python 3.13 ([#2215](https://github.com/googleapis/gapic-generator-python/issues/2215)) ([4e1f9c6](https://github.com/googleapis/gapic-generator-python/commit/4e1f9c623065e5917dbd1d2178228776b7ea536d)) + + +### Bug Fixes + +* Added underscores in services/types in index.rst.j2 ([#2232](https://github.com/googleapis/gapic-generator-python/issues/2232)) ([f2053ee](https://github.com/googleapis/gapic-generator-python/commit/f2053ee04127f1f0d23fd04438ee4607ee1ce76c)) +* Allow `google-cloud-kms` 3.x ([#2226](https://github.com/googleapis/gapic-generator-python/issues/2226)) ([5e07501](https://github.com/googleapis/gapic-generator-python/commit/5e075016a2119782e611cd51335fa0af7e4c18c2)) + +## [1.19.1](https://github.com/googleapis/gapic-generator-python/compare/v1.19.0...v1.19.1) (2024-10-10) + + +### Bug Fixes + +* Add default library settings for incorrect lib version ([#2212](https://github.com/googleapis/gapic-generator-python/issues/2212)) ([de46272](https://github.com/googleapis/gapic-generator-python/commit/de46272ae65e9117be7f362355cefd28d0780917)) +* Resolve issue with wait operation mixin ([#2218](https://github.com/googleapis/gapic-generator-python/issues/2218)) ([095d060](https://github.com/googleapis/gapic-generator-python/commit/095d0600803dace8d665fee9ccbc460720b5fe17)) +* Use disambiguated name for rpcs to avoid collisions ([#2217](https://github.com/googleapis/gapic-generator-python/issues/2217)) ([296cd3e](https://github.com/googleapis/gapic-generator-python/commit/296cd3e814ba58954c16ca6256db0359bcab0f09)) + +## [1.19.0](https://github.com/googleapis/gapic-generator-python/compare/v1.18.5...v1.19.0) (2024-10-09) + + +### Features + +* Add async rest transport support in gapics ([#2164](https://github.com/googleapis/gapic-generator-python/issues/2164)) ([2949465](https://github.com/googleapis/gapic-generator-python/commit/29494651fb39719af920ee1c114c82bd903e544b)) +* Add support for reading ClientLibrarySettings from service configuration YAML ([#2098](https://github.com/googleapis/gapic-generator-python/issues/2098)) ([11e3967](https://github.com/googleapis/gapic-generator-python/commit/11e3967b6a3b1e86f5ec0f5387bd340e3a8ae9d0)) +* Implement async rest transport constructor ([#2123](https://github.com/googleapis/gapic-generator-python/issues/2123)) ([2809753](https://github.com/googleapis/gapic-generator-python/commit/28097536e1a47063a5d3211e9c1c498a1f06c724)) +* Leverage async anonymous credentials in tests ([#2105](https://github.com/googleapis/gapic-generator-python/issues/2105)) 
([4afac87](https://github.com/googleapis/gapic-generator-python/commit/4afac87efc8fcdd7090d003b6247be64071b611d)) + + +### Bug Fixes + +* Add support for field with name 'self' ([#2205](https://github.com/googleapis/gapic-generator-python/issues/2205)) ([ed88fe2](https://github.com/googleapis/gapic-generator-python/commit/ed88fe298647cdad310a5341f931a0de42f1b81e)) +* Resolve issue where explicit routing metadata was not sent in async clients ([#2133](https://github.com/googleapis/gapic-generator-python/issues/2133)) ([c222b12](https://github.com/googleapis/gapic-generator-python/commit/c222b125d741426259d82e726c0c07397d099a8a)) +* Streaming for sync REST API calls ([#2204](https://github.com/googleapis/gapic-generator-python/issues/2204)) ([ce3b84c](https://github.com/googleapis/gapic-generator-python/commit/ce3b84c67a31785f45eb46f154ef08af6edc9a36)) + ## [1.18.5](https://github.com/googleapis/gapic-generator-python/compare/v1.18.4...v1.18.5) (2024-08-06) diff --git a/Dockerfile b/Dockerfile index beda35897b..71a201f1b9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim +FROM python:3.13-slim # Install system packages. RUN apt-get update \ diff --git a/WORKSPACE b/WORKSPACE index 0828e20717..86d90144c6 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -60,9 +60,9 @@ gapic_generator_python() gapic_generator_register_toolchains() -_grpc_version = "1.64.2" +_grpc_version = "1.67.1" -_grpc_sha256 = "8579095a31e280d0c5fcc81ea0a2a0efb2900dbfbac0eb018a961a5be22e076e" +_grpc_sha256 = "f83aedc91b84d4c396d30b0b2a30f7113c651fe5bc180c8ac08a5f0ff7dcffd2" http_archive( name = "com_github_grpc_grpc", @@ -73,14 +73,23 @@ http_archive( # instantiated in grpc_deps(). http_archive( name = "com_google_protobuf", - sha256 = "13e7749c30bc24af6ee93e092422f9dc08491c7097efa69461f88eb5f61805ce", - strip_prefix = "protobuf-28.0", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v28.0.tar.gz"], + sha256 = "63150aba23f7a90fd7d87bdf514e459dd5fe7023fdde01b56ac53335df64d4bd", + strip_prefix = "protobuf-29.2", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v29.2.tar.gz"], ) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") grpc_deps() +# Pin the version of rules_cc to the version that is present in +# https://github.com/protocolbuffers/protobuf/blob/29.x/protobuf_deps.bzl#L92-L98 +http_archive( + name = "rules_cc", + urls = ["https://github.com/bazelbuild/rules_cc/releases/download/0.0.16/rules_cc-0.0.16.tar.gz"], + sha256 = "bbf1ae2f83305b7053b11e4467d317a7ba3517a12cef608543c1b1c5bf48a4df", + strip_prefix = "rules_cc-0.0.16", +) + load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps", "PROTOBUF_MAVEN_ARTIFACTS") # This is actually already done within grpc_deps but calling this for Bazel convention. protobuf_deps() diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 index f264ce8886..6dd04dfb18 100644 --- a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 +++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 @@ -32,11 +32,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -86,11 +82,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -143,11 +135,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -195,11 +183,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -250,11 +234,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.wait_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.wait_operation] metadata = tuple(metadata) @@ -369,11 +349,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -489,11 +465,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -547,11 +519,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. @@ -604,11 +572,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. 
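For readers tracing the mixin hunks above: each removed block built a fresh method wrapper on every call, while the replacement reuses the wrapper that the transport precomputes once (in `_prep_wrapped_messages`) and stores in `_wrapped_methods`, keyed by the bound transport method. A minimal sketch of the rendered Python, using `list_operations` as the example:

# Before: a new wrapper (retry, timeout, friendly error handling) was
# constructed on every invocation.
rpc = gapic_v1.method.wrap_method(
    self._transport.list_operations,
    default_timeout=None,
    client_info=DEFAULT_CLIENT_INFO,
)

# After: look up the wrapper computed once at transport construction time,
# keyed by the bound transport method itself.
rpc = self._transport._wrapped_methods[self._transport.list_operations]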
@@ -657,11 +621,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 index e7f623cfd2..b055b9ca31 100644 --- a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 +++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 @@ -57,10 +57,53 @@ The `try/except` below can be removed once the minimum version of try: from google.api_core import version_header HAS_GOOGLE_API_CORE_VERSION_HEADER = True # pragma: NO COVER +{# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} except ImportError: # pragma: NO COVER HAS_GOOGLE_API_CORE_VERSION_HEADER = False {% endif %}{# service_version #} {% endmacro %} + +{% macro create_metadata(method) %} + {% if method.explicit_routing %} + header_params = {} + {% if not method.client_streaming %} + {% for routing_param in method.routing_rule.routing_parameters %} + {% if routing_param.path_template %} {# Need to match. #} + + routing_param_regex = {{ routing_param.to_regex() }} + regex_match = routing_param_regex.match(request.{{ routing_param.field }}) + if regex_match and regex_match.group("{{ routing_param.key }}"): + header_params["{{ routing_param.key }}"] = regex_match.group("{{ routing_param.key }}") + + {% else %} + + if request.{{ routing_param.field }}: + header_params["{{ routing_param.key }}"] = request.{{ routing_param.field }} + + {% endif %} + {% endfor %} {# method.routing_rule.routing_parameters #} + {% endif %} {# if not method.client_streaming #} + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + {% elif method.field_headers %}{# implicit routing #} + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + {% if not method.client_streaming %} + {% for field_header in method.field_headers %} + ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), + {% endfor %}{# for field_header in method.field_headers #} + {% endif %}{# not method.client_streaming #} + )), + ) + {% endif %}{# method.explicit_routing #} +{% endmacro %}{# create_metadata #} + {% macro add_api_version_header_to_metadata(service_version) %} {# Add API Version to metadata as per https://github.com/aip-dev/google.aip.dev/pull/1331. 
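To make the `create_metadata` macro above concrete: for a routing parameter that carries a path template, the rendered client extracts the named group from the request field and forwards it as a routing header, while parameters without a template fall back to a simple presence check. A hedged sketch of one possible rendering, which assumes `request` and `metadata` are in scope from the enclosing RPC method; the field name `name`, key `project`, and the regex itself are illustrative assumptions, not output copied from the generator:

import re
from google.api_core import gapic_v1

header_params = {}

# Hypothetical regex produced by routing_param.to_regex() for a template
# like "projects/{project}/instances/*" on request field `name`.
routing_param_regex = re.compile("^projects/(?P<project>[^/]+)/instances/[^/]+$")
regex_match = routing_param_regex.match(request.name)
if regex_match and regex_match.group("project"):
    header_params["project"] = regex_match.group("project")

# Matched parameters are sent as the x-goog-request-params routing header.
if header_params:
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata(header_params),
    )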
@@ -74,3 +117,332 @@ except ImportError: # pragma: NO COVER ) {% endif %}{# service_version #} {% endmacro %} + +{% macro operations_mixin_imports(api, service, opts) %} +{% if import_ns is not defined %} +{% set import_ns = namespace(has_operations_mixin=false) %} +{% endif %}{# import_ns is not defined #} +{% set import_ns.has_operations_mixin = api.has_operations_mixin %} + +{% filter sort_lines %} +{% for method in service.methods.values() %} +{{method.input.ident.python_import}} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True %} +{% else %} +{{method.output.ident.python_import}} +{% endif %} +{% endfor %} +{% if opts.add_iam_methods or api.has_iam_mixin %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %}{# opts.add_iam_methods #} +{% endfilter %} +{% if import_ns.has_operations_mixin %} +from google.longrunning import operations_pb2 # type: ignore +{% endif %}{# import_ns.has_operations_mixin #} +{% endmacro %} + +{% macro http_options_method(rules) %} +@staticmethod +def _get_http_options(): + http_options: List[Dict[str, str]] = [ + {%- for rule in rules %}{ + 'method': '{{ rule.method }}', + 'uri': '{{ rule.uri }}', + {% if rule.body %} + 'body': '{{ rule.body }}', + {% endif %}{# rule.body #} + }, + {% endfor %}{# rule in rules #} + ] + return http_options +{% endmacro %} + +{% macro response_method(body_spec, is_async=False) %} +{% set async_prefix = "async " if is_async else "" %} +{% set await_prefix = "await " if is_async else "" %} +@staticmethod +{{ async_prefix }}def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = {{ await_prefix }}getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + {% if body_spec %} + data=body, + {% endif %} + ) + return response +{% endmacro %} + +{# rest_call_method_common includes the common code for a rest __call__ method to be + re-used for sync REST, async REST, and mixins __call__ implementation. + + Args: + body_spec (str): The http options body i.e. method.http_options[0].body + method_name (str): The method name. + service_name (str): The service name. + is_async (bool): Used to determine the code path i.e. whether for sync or async call. 
#} +{% macro rest_call_method_common(body_spec, method_name, service_name, is_async=False) %} +{% set await_prefix = "await " if is_async else "" %} +{% set async_class_prefix = "Async" if is_async else "" %} + + http_options = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_http_options() + request, metadata = {{ await_prefix }}self._interceptor.pre_{{ method_name|snake_case }}(request, metadata) + transcoded_request = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_transcoded_request(http_options, request) + + {% if body_spec %} + body = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_request_body_json(transcoded_request) + {% endif %}{# body_spec #} + + # Jsonify the query params + query_params = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_query_params_json(transcoded_request) + + # Send the request + response = {{ await_prefix }}{{ async_class_prefix }}{{ service_name }}RestTransport._{{method_name}}._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request{% if body_spec %}, body{% endif %}) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + {# Note: format_http_response_error takes in more parameters than from_http_response and the + latter only supports a response of type requests.Response. + TODO: Clean up the sync response error handling and use format_http_response_error. + See issue: https://github.com/googleapis/gapic-generator-python/issues/2116. #} + {% if is_async %} + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2137): Remove `type: ignore` once version check is added for google-api-core. 
#} + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + {% else %} + raise core_exceptions.from_http_response(response) + {% endif %}{# is_async #} + +{% endmacro %} + + +{% macro prep_wrapped_messages_async_method(api, service) %} +def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + {% for method in service.methods.values() %} + self.{{ method.transport_safe_name|snake_case }}: self._wrap_method( + self.{{ method.transport_safe_name|snake_case }}, + {% if method.retry %} + default_retry=retries.AsyncRetry( + {% if method.retry.initial_backoff %} + initial={{ method.retry.initial_backoff }}, + {% endif %} + {% if method.retry.max_backoff %} + maximum={{ method.retry.max_backoff }}, + {% endif %} + {% if method.retry.backoff_multiplier %} + multiplier={{ method.retry.backoff_multiplier }}, + {% endif %} + predicate=retries.if_exception_type( + {% for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} + core_exceptions.{{ ex.__name__ }}, + {% endfor %} + ), + deadline={{ method.timeout }}, + ), + {% endif %} + default_timeout={{ method.timeout }}, + client_info=client_info, + ), + {% endfor %}{# service.methods.values() #} + {% for method_name in api.mixin_api_methods.keys() %} + self.{{ method_name|snake_case }}: self._wrap_method( + self.{{ method_name|snake_case }}, + default_timeout=None, + client_info=client_info, + ), + {% endfor %} {# method_name in api.mixin_api_methods.keys() #} + } +{% endmacro %} + +{# TODO: This helper logic to check whether `kind` needs to be configured in wrap_method +can be removed once we require the correct version of the google-api-core dependency to +avoid having a gRPC code path in an async REST call. +See related issue: https://github.com/googleapis/python-api-core/issues/661. +In the meantime, if an older version of the dependency is installed (which has a wrap_method with +no kind parameter), then an async gRPC call will work correctly and async REST transport +will not be available as a transport. +See related issue: https://github.com/googleapis/gapic-generator-python/issues/2119. #} +{% macro wrap_async_method_macro() %} +def _wrap_method(self, func, *args, **kwargs): + {# TODO: Remove `pragma: NO COVER` once https://github.com/googleapis/python-api-core/pull/688 is merged. #} + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) +{% endmacro %} + +{# `create_interceptor_class` generates an Interceptor class for + # synchronous and asynchronous rest transports +#} +{% macro create_interceptor_class(api, service, method, is_async=False) %} +{% set async_prefix = "async " if is_async else "" %} +{% set async_method_name_prefix = "Async" if is_async else "" %} +{% set async_docstring = "Asynchronous " if is_async else "" %} +{% set async_suffix = "_async" if is_async else "" %} + +class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor: + """{{ async_docstring }}Interceptor for {{ service.name }}. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the {{ async_method_name_prefix }}{{ service.name }}RestTransport. + + .. code-block:: python + class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): + {% for _, method in service.methods|dictsort if not method.client_streaming %} + {{ async_prefix }}def pre_{{ method.name|snake_case }}(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + {% if not method.void %} + {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response): + logging.log(f"Received response: {response}") + return response + {% endif %} + + {% endfor %} + transport = {{ async_method_name_prefix }}{{ service.name }}RestTransport(interceptor=MyCustom{{ service.name }}Interceptor()) + client = {{ async_prefix }}{{ service.client_name }}(transport=transport) + + + """ + {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming and method.http_options %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2147): Remove the condition below once async rest transport supports the guarded methods. #} + {% if (not is_async) or (is_async and not method.lro and not method.extended_lro and not method.paged_result_field) %} + {{ async_prefix }}def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to manipulate the request or metadata + before they are sent to the {{ service.name }} server. + """ + return request, metadata + + {% if not method.void %} + {% if not method.server_streaming %} + {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}: + {% else %} + {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response: rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator) -> rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator: + {% endif %} + """Post-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to manipulate the response + after it is returned by the {{ service.name }} server but before + it is returned to user code. + """ + return response + {% endif %} + {% endif %}{# if (not is_async) or (is_async and not method.lro and not method.extended_lro and not method.paged_result_field) #} + {% endfor %} + + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2148): Remove the condition below once mixins are supported for async rest transport. #} + {% if not is_async %} + {% for name, signature in api.mixin_api_signatures.items() %} + {{ async_prefix }}def pre_{{ name|snake_case }}( + self, request: {{signature.request_type}}, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[{{signature.request_type}}, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for {{ name|snake_case }} + + Override in a subclass to manipulate the request or metadata + before they are sent to the {{ service.name }} server. 
+ """ + return request, metadata + + {{ async_prefix }}def post_{{ name|snake_case }}( + self, response: {{signature.response_type}} + ) -> {{signature.response_type}}: + """Post-rpc interceptor for {{ name|snake_case }} + + Override in a subclass to manipulate the response + after it is returned by the {{ service.name }} server but before + it is returned to user code. + """ + return response + {% endfor %} + {% endif %} +{% endmacro %} + +{% macro generate_mixin_call_method(service, api, name, sig, is_async) %} +{% set async_prefix = "async " if is_async else "" %} +{% set async_method_name_prefix = "Async" if is_async else "" %} +{% set async_suffix = "_async" if is_async else "" %} +{% set await_prefix = "await " if is_async else "" %} + +@property +def {{ name|snake_case }}(self): + return self._{{ name }}(self._session, self._host, self._interceptor) # type: ignore + +class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_method_name_prefix }}{{service.name}}RestStub): + def __hash__(self): + return hash("{{ async_method_name_prefix }}{{ service.name }}RestTransport.{{ name }}") + + {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} + {{ response_method(body_spec) | indent(4) }} + + {{ async_prefix }}def __call__(self, + request: {{ sig.request_type }}, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> {{ sig.response_type }}: + + r"""Call the {{- ' ' -}} + {{ (name|snake_case).replace('_',' ')|wrap(width=70, offset=45, indent=8) }} + {{- ' ' -}} method over HTTP. + + Args: + request ({{ sig.request_type }}): + The request object for {{ name }} method. + retry (google.api_core.retry{{ async_suffix }}.{{ async_method_name_prefix }}Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + {% if sig.response_type != 'None' %} + + Returns: + {{ sig.response_type }}: Response from {{ name }} method. 
+ {% endif %} + """ + {{ rest_call_method_common(body_spec, name, service.name, is_async)|indent(4) }} + + {% if sig.response_type == "None" %} + return {{ await_prefix }}self._interceptor.post_{{ name|snake_case }}(None) + {% else %} + {% if is_async %} + content = await response.read() + {% else %} + content = response.content.decode("utf-8") + {% endif %} + resp = {{ sig.response_type }}() + resp = json_format.Parse(content, resp) + resp = {{ await_prefix }}self._interceptor.post_{{ name|snake_case }}(resp) + return resp + {% endif %} + +{% endmacro %} \ No newline at end of file diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 064e17924f..5555339c4d 100644 --- a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -345,7 +345,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if method.operation_service %}{# Extended Operations LRO #} def {{ method.name|snake_case }}_unary(self, {% else %} - def {{ method.name|snake_case }}(self, + def {{ method.safe_name|snake_case }}(self, {% endif %}{# Extended Operations LRO #} {% if not method.client_streaming %} request: Optional[Union[{{ method.input.ident }}, dict]] = None, diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 index 00b0d53b71..50a49d6d00 100644 --- a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 +++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 @@ -53,7 +53,7 @@ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "delete_operation" not in self._stubs: + if "wait_operation" not in self._stubs: self._stubs["wait_operation"] = self.grpc_channel.unary_unary( "/google.longrunning.Operations/WaitOperation", request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins.py.j2 new file mode 100644 index 0000000000..2603b19b48 --- /dev/null +++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins.py.j2 @@ -0,0 +1,30 @@ +{# + # Copyright (C) 2024 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # + # This file is a copy of `_rest_mixins.py.j2` in standard templates located at + # `gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2` + # It is intended to be a symlink. 
+ # See https://github.com/googleapis/gapic-generator-python/issues/2028 + # which contains follow up work to convert it to a symlink. + # Do not diverge from the copy of `_rest_mixins.py.j2` in standard templates. +#} + +{% import "%namespace/%name/%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} + + {% if "rest" in opts.transport %} + {% for name, sig in api.mixin_api_signatures.items() %} + {{ shared_macros.generate_mixin_call_method(service, api, name, sig, is_async=False) | indent(4) }} + {% endfor %} + {% endif %} {# rest in opts.transport #} diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 new file mode 100644 index 0000000000..b88489d5f1 --- /dev/null +++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 @@ -0,0 +1,55 @@ +{# + # Copyright (C) 2024 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # This file is a copy of `_rest_mixins_base.py.j2` in standard templates located at + # `gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2` + # It is intended to be a symlink. + # See https://github.com/googleapis/gapic-generator-python/issues/2028 + # which contains follow up work to convert it to a symlink. + # Do not diverge from the copy of `_rest_mixins_base.py.j2` in standard templates. 
+#} + +{% import "%namespace/%name/%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} + + {% if "rest" in opts.transport %} + + {% for name, sig in api.mixin_api_signatures.items() %} + class _Base{{ name }}: + + {{ shared_macros.http_options_method(api.mixin_http_options["{}".format(name)])|indent(8)}} + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} + {%- if body_spec %} + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + + {%- endif %} {# body_spec #} + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + {% endfor %} + {% endif %} {# rest in opts.transport #} diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index d4364e44f3..e9789d8a5d 100644 --- a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -150,7 +150,14 @@ class {{ service.name }}Transport(abc.ABC): default_timeout={{ method.timeout }}, client_info=client_info, ), - {% endfor %} {# precomputed wrappers loop #} + {% endfor %}{# method in service.methods.values() #} + {% for method_name in api.mixin_api_methods.keys() %} + self.{{ method_name|snake_case }}: gapic_v1.method.wrap_method( + self.{{ method_name|snake_case }}, + default_timeout=None, + client_info=client_info, + ), + {% endfor %} {# method_name in api.mixin_api_methods.keys() #} } def close(self): diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 5a2d2f5b34..a55ced7c08 100644 --- a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -1,3 +1,12 @@ +{% import "%namespace/%name/%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} +{# + # This file is a copy of `rest.py.j2` in standard templates located at + # `gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2` + # It is intended to be a symlink. + # See https://github.com/googleapis/gapic-generator-python/issues/2028 + # which contains follow up work to convert it to a symlink. + # Do not diverge from the copy of `rest.py.j2` in standard templates. 
+#} {% extends '_base.py.j2' %} {% block content %} @@ -5,47 +14,41 @@ from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format {% if service.has_lro %} from google.api_core import operations_v1 {% endif %} +{% if opts.add_iam_methods or api.has_iam_mixin %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings +{{ shared_macros.operations_mixin_imports(api, service, opts) }} + +from .rest_base import _Base{{ service.name }}RestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -{# TODO(yon-mg): re-add python_import/ python_modules from removed diff/current grpc template code #} -{% filter sort_lines %} -{% for method in service.methods.values() %} -{{method.input.ident.python_import}} -{{method.output.ident.python_import}} -{% endfor %} -{% if opts.add_iam_methods %} -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -{% endif %} -{% endfilter %} - -from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, @@ -53,63 +56,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( ) -class {{ service.name }}RestInterceptor: - """Interceptor for {{ service.name }}. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the {{ service.name }}RestTransport. - - .. 
code-block:: python - class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): - {% for _, method in service.methods|dictsort if not method.client_streaming %} - def pre_{{ method.name|snake_case }}(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - {% if not method.void %} - def post_{{ method.name|snake_case }}(self, response): - logging.log(f"Received response: {response}") - return response - {% endif %} - -{% endfor %} - transport = {{ service.name }}RestTransport(interceptor=MyCustom{{ service.name }}Interceptor()) - client = {{ service.client_name }}(transport=transport) - - - """ - {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming and method.http_options %} - def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for {{ method.name|snake_case }} - - Override in a subclass to manipulate the request or metadata - before they are sent to the {{ service.name }} server. - """ - return request, metadata - - {% if not method.void %} - {% if not method.server_streaming %} - def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}: - {% else %} - def post_{{ method.name|snake_case }}(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - {% endif %} - """Post-rpc interceptor for {{ method.name|snake_case }} - - Override in a subclass to manipulate the response - after it is returned by the {{ service.name }} server but before - it is returned to user code. - """ - return response - {% endif %} - - {% endfor %} +{{ shared_macros.create_interceptor_class(api, service, method, is_async=False) }} @dataclasses.dataclass @@ -119,8 +66,8 @@ class {{service.name}}RestStub: _interceptor: {{ service.name }}RestInterceptor -class {{service.name}}RestTransport({{service.name}}Transport): - """REST backend transport for {{ service.name }}. +class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): + """REST backend synchronous transport for {{ service.name }}. {{ service.meta.doc|rst(width=72, indent=4) }} @@ -129,15 +76,8 @@ class {{service.name}}RestTransport({{service.name}}Transport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - - {% if not opts.rest_numeric_enums %} - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! - {% endif %} """ - {# TODO(yon-mg): handle mtls stuff if that is relevant for rest transport #} def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, @@ -151,6 +91,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): always_use_jwt_access: Optional[bool] = False, url_scheme: str = 'https', interceptor: Optional[{{ service.name }}RestInterceptor] = None, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -162,7 +103,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{ ' ' }}The hostname to connect to. + {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -194,19 +135,13 @@ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience ) self._session = AuthorizedSession( self._credentials, default_host=self.DEFAULT_HOST) @@ -263,24 +198,14 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %}{# service.has_lro #} {% for method in service.methods.values()|sort(attribute="name") %} - class _{{method.name}}({{service.name}}RestStub): + class _{{method.name}}(_Base{{ service.name }}RestTransport._Base{{method.name}}, {{service.name}}RestStub): def __hash__(self): - return hash("{{method.name}}") - + return hash("{{service.name}}RestTransport.{{method.name}}") {% if method.http_options and not method.client_streaming %} - {% if method.input.required_fields %} - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - {% for req_field in method.input.required_fields if req_field.name in method.query_params %} - "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% elif req_field.field_pb.type in [11, 14] %}{}{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} - {% endfor %} - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - {% endif %}{# required fields #} - {% endif %}{# not method.client_streaming #} + {% set body_spec = method.http_options[0].body %} + {{ shared_macros.response_method(body_spec)|indent(8) }} + {% endif %}{# method.http_options and not method.client_streaming #} def __call__(self, request: {{method.input.ident}}, *, @@ -297,7 +222,7 @@ Args: request (~.{{ method.input.ident }}): The request object.{{ ' ' }} - {{- method.input.meta.doc|rst(width=72, indent=16) }} + {{- method.input.meta.doc|rst(width=72, indent=16, nl=False) }} retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request.
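The hunk that follows strips this inlined HTTP plumbing out of `rest.py.j2` and delegates to the `rest_call_method_common` macro defined in `_shared_macros.j2`. Conceptually, every generated synchronous `__call__` now runs the same pipeline; a simplified sketch using a hypothetical `Echo` service and method (real code is rendered per service and method, with these names substituted):

# 1. Static per-method HTTP rules (method/uri/body) from the shared base class.
http_options = _BaseEchoRestTransport._BaseEcho._get_http_options()
# 2. Give the user-supplied interceptor a chance to rewrite request/metadata.
request, metadata = self._interceptor.pre_echo(request, metadata)
# 3. Transcode the proto request into uri, method, body, and query params.
transcoded_request = _BaseEchoRestTransport._BaseEcho._get_transcoded_request(
    http_options, request)
query_params = _BaseEchoRestTransport._BaseEcho._get_query_params_json(
    transcoded_request)
# 4. Send the request through the session helper emitted by response_method().
response = EchoRestTransport._Echo._get_response(
    self._host, metadata, query_params, self._session, timeout,
    transcoded_request)
# 5. Surface HTTP errors as core_exceptions.GoogleAPICallError subclasses.
if response.status_code >= 400:
    raise core_exceptions.from_http_response(response)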
@@ -311,67 +236,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %} """ - http_options: List[Dict[str, str]] = [ - {%- for rule in method.http_options %}{ - 'method': '{{ rule.method }}', - 'uri': '{{ rule.uri }}', - {% if rule.body %} - 'body': '{{ rule.body }}', - {% endif %}{# rule.body #} - }, - {% endfor %}{# rule in method.http_options #} - ] - request, metadata = self._interceptor.pre_{{ method.name|snake_case }}(request, metadata) - {% if method.input.ident.is_proto_plus_type %} - pb_request = {{method.input.ident}}.pb(request) - {% else %} - pb_request = request - {% endif %} - transcoded_request = path_template.transcode(http_options, pb_request) - - {% set body_spec = method.http_options[0].body %} - {%- if body_spec %} - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums={{ opts.rest_numeric_enums }} - ) - {%- endif %} - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums={{ opts.rest_numeric_enums }}, - )) - {% if method.input.required_fields %} - query_params.update(self._get_unset_required_fields(query_params)) - {% endif %}{# required fields #} - - {% if opts.rest_numeric_enums %} - query_params["$alt"] = "json;enum-encoding=int" - {% endif %} - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - {% if body_spec %} - data=body, - {% endif %} - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) + {{ shared_macros.rest_call_method_common(body_spec, method.name, service.name)|indent(8) }} {% if not method.void %} # Return the response @@ -398,7 +263,6 @@ class {{service.name}}RestTransport({{service.name}}Transport): raise NotImplementedError( "Method {{ method.name }} is not available over REST transport" ) - {% endif %}{# method.http_options and not method.client_streaming #} {% endfor %} {% for method in service.methods.values()|sort(attribute="name") %} @@ -413,6 +277,13 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endfor %} + {% include '%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins.py.j2' %} + + {# + TODO(https://github.com/googleapis/gapic-generator-python/issues/2183): + Add `kind` property to transport in ads templates + #} + def close(self): self._session.close() diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest_base.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest_base.py.j2 new file mode 100644 index 0000000000..29b64f0008 --- /dev/null +++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest_base.py.j2 @@ -0,0 +1,186 @@ +{# + # Copyright (C) 2024 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. 
+ # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. +#} +{# + # This file is a copy of `rest_base.py.j2` in standard templates located at + # `gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2` + # It is intended to be a symlink. + # See https://github.com/googleapis/gapic-generator-python/issues/2028 + # which contains follow up work to convert it to a symlink. + # Do not diverge from the copy of `rest_base.py.j2` in standard templates. +#} + +{% import "%namespace/%name/%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} +{% extends '_base.py.j2' %} + +{% block content %} + +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +{% if opts.add_iam_methods or api.has_iam_mixin %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} +from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +{{ shared_macros.operations_mixin_imports(api, service, opts) }} + + +class _Base{{ service.name }}RestTransport({{service.name}}Transport): + """Base REST backend transport for {{ service.name }}. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + + {# TODO: handle mtls stuff if that is relevant for rest transport #} + def __init__(self, *, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2173): Type hint for credentials is + # set to `Any` to support async and sync credential types in the parent rest transport classes. + # However, we should have a stronger type here such as an abstract base credentials + # class leveraged by sync and async credential classes. + #} + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + {% for method in service.methods.values()|sort(attribute="name") %} + class _Base{{method.name}}: + def __hash__(self): # pragma: NO COVER + raise NotImplementedError("__hash__ must be implemented.") + + {% if method.http_options and not method.client_streaming %} + {% if method.input.required_fields %} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% elif req_field.field_pb.type in [11, 14] %}{}{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} + {% endfor %} + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + {% endif %}{# required fields #} + + {% set method_http_options = method.http_options %} + + {{ shared_macros.http_options_method(method_http_options)|indent(8) }} + + @staticmethod + def _get_transcoded_request(http_options, request): + {% if method.input.ident.is_proto_plus_type %} + pb_request = {{method.input.ident}}.pb(request) + {% else %} + pb_request = request + {% endif %} + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + {% set body_spec = method.http_options[0].body %} + {%- if body_spec %} + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums={{ opts.rest_numeric_enums }} + ) + return body + + {%- endif %}{# body_spec #} + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums={{ opts.rest_numeric_enums }}, + )) + {% if method.input.required_fields %} + query_params.update(_Base{{ service.name }}RestTransport._Base{{method.name}}._get_unset_required_fields(query_params)) + {% endif %}{# required fields #} + + {% if opts.rest_numeric_enums %} + query_params["$alt"] = "json;enum-encoding=int" + {% endif %} + return query_params + + {% endif %}{# method.http_options and not method.client_streaming #} + {% endfor %} + + {% include '%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins_base.py.j2' %} + + +__all__=( + '_Base{{ service.name }}RestTransport', +) +{% endblock %} diff --git a/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 
b/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 index a7383b72e6..5a8d6d6bac 100644 --- a/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 +++ b/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 @@ -56,15 +56,13 @@ def __dir__(): {% else %} {# do not use lazy import #} {# Import subpackages. -#} {% for subpackage in api.subpackages|dictsort %} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} - {{ api.naming.versioned_module_name }} import {{ subpackage }} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %}{{ api.naming.versioned_module_name }} import {{ subpackage }} {% endfor %} {# Import services for this package. -#} {% for service in api.services.values()|sort(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} - {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} {% endfor %} {# Import messages and enums from each proto. @@ -80,12 +78,10 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' {% for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view %} {% for message in proto.messages.values()|sort(attribute='name') %} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} - {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} {% endfor %} {% for enum in proto.enums.values()|sort(attribute='name') %} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} - {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} {% endfor %}{% endfor %} {# Define __all__. 
This requires the full set of imported names, so we iterate over diff --git a/gapic/ads-templates/noxfile.py.j2 b/gapic/ads-templates/noxfile.py.j2 index f10310fdfb..9a2836cb8f 100644 --- a/gapic/ads-templates/noxfile.py.j2 +++ b/gapic/ads-templates/noxfile.py.j2 @@ -14,6 +14,7 @@ ALL_PYTHON = [ "3.10", "3.11", "3.12", + "3.13", ] @nox.session(python=ALL_PYTHON) diff --git a/gapic/ads-templates/setup.py.j2 b/gapic/ads-templates/setup.py.j2 index 8bc504f57a..be5e116888 100644 --- a/gapic/ads-templates/setup.py.j2 +++ b/gapic/ads-templates/setup.py.j2 @@ -72,6 +72,7 @@ setuptools.setup( "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index 8175f2e0e5..d7f8bb7e68 100644 --- a/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -1,3 +1,69 @@ +{% if 'rest' in opts.transport %} +{% for name, sig in api.mixin_api_signatures.items() %} + +def test_{{ name|snake_case }}_rest_bad_request(transport: str = 'rest', request_type={{ sig.request_type }}): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({{ api.mixin_http_options["{}".format(name)][0].sample_request }}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.{{ name|snake_case }}(request) + +@pytest.mark.parametrize("request_type", [ + {{ sig.request_type }}, + dict, +]) +def test_{{ name|snake_case }}_rest(request_type): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {{ api.mixin_http_options["{}".format(name)][0].sample_request }} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + {% if sig.response_type == "None" %} + return_value = None + {% else %} + return_value = {{ sig.response_type }}() + {% endif %} + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + {% if sig.response_type == "None" %} + json_return_value = '{}' + {% else %} + json_return_value = json_format.MessageToJson(return_value) + {% endif %} + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.{{ name|snake_case }}(request) + + # Establish that the response is the type that we expect. 
+ {% if sig.response_type == "None" %} + assert response is None + {% else %} + assert isinstance(response, {{ sig.response_type }}) + {% endif %} +{% endfor %} +{% endif %} + {% if api.has_operations_mixin and 'grpc' in opts.transport %} {% if "DeleteOperation" in api.mixin_api_methods %} diff --git a/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 9387a124e2..3c839f4c11 100644 --- a/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -516,7 +516,7 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl {% endif %} -{% for method in service.methods.values() if 'grpc' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} +{% for method in service.methods.values() if 'grpc' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} @pytest.mark.parametrize("request_type", [ {{ method.input.ident }}, dict, ]) @@ -579,7 +579,7 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): ) {% endif %} {% if method.client_streaming %} - response = client.{{ method.name|snake_case }}(iter(requests)) + response = client.{{ method.safe_name|snake_case }}(iter(requests)) {% else %} response = client.{{ method_name }}(request) {% endif %} @@ -1053,7 +1053,7 @@ def test_{{ method_name }}_raw_page_lro(): {% endfor %} {# method in methods for grpc #} -{% for method in service.methods.values() if 'rest' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %}{% if method.http_options %} +{% for method in service.methods.values() if 'rest' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %}{% if method.http_options %} {# TODO(kbandes): remove this if condition when client streaming is supported. 
#} {% if not method.client_streaming %} @pytest.mark.parametrize("request_type", [ @@ -1096,6 +1096,7 @@ def test_{{ method_name }}_rest(request_type, transport: str = 'rest'): req.return_value = Response() req.return_value.status_code = 500 req.return_value.request = PreparedRequest() + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if method.void %} json_return_value = '' {% elif method.server_streaming %} @@ -1249,8 +1250,9 @@ def test_{{ method.name|snake_case }}_rest(request_type): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if method.client_streaming %} - response = client.{{ method.name|snake_case }}(iter(requests)) + response = client.{{ method.safe_name|snake_case }}(iter(requests)) {% elif method.server_streaming %} with mock.patch.object(response_value, 'iter_content') as iter_content: iter_content.return_value = iter(json_return_value) @@ -1495,6 +1497,7 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): } req.return_value = Response() + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} req.return_value.status_code = 200 req.return_value.request = PreparedRequest() {% if not method.void %} @@ -1545,8 +1548,9 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if method.client_streaming %} - client.{{ method.name|snake_case }}(iter(requests)) + client.{{ method.safe_name|snake_case }}(iter(requests)) {% else %} client.{{ method_name }}(request) {% endif %} @@ -1814,7 +1818,7 @@ def test_{{ method_name }}_rest_no_http_options(): {% endfor -%} {#- method in methods for rest #} {% for method in service.methods.values() if 'rest' in opts.transport and - not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} + not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), diff --git a/gapic/samplegen/samplegen.py b/gapic/samplegen/samplegen.py index ebc70936a2..b6b5635f34 100644 --- a/gapic/samplegen/samplegen.py +++ b/gapic/samplegen/samplegen.py @@ -1088,7 +1088,7 @@ def _fill_sample_metadata(sample: dict, api_schema: api.API): if not method.void: snippet_metadata.client_method.result_type = method.client_output_async.ident.sphinx if async_ else method.client_output.ident.sphinx if method.server_streaming: - snippet_metadata.client_method.result_type = f"Iterable[{snippet_metadata.client_method.result_type }]" + snippet_metadata.client_method.result_type = f"Iterable[{snippet_metadata.client_method.result_type}]" # Client Method Parameters parameters = snippet_metadata.client_method.parameters @@ -1107,7 +1107,7 @@ def _fill_sample_metadata(sample: dict, api_schema: api.API): parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore name="timeout", type="float")) parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore - name="metadata", type="Sequence[Tuple[str, str]")) + name="metadata", type="Sequence[Tuple[str, Union[str, bytes]]]")) return 
snippet_metadata diff --git a/gapic/schema/api.py b/gapic/schema/api.py index ce2fdc8022..f7a7669f8c 100644 --- a/gapic/schema/api.py +++ b/gapic/schema/api.py @@ -706,13 +706,14 @@ def all_library_settings( for library_setting in self.service_yaml_config.publishing.library_settings } - # Add default settings for the current proto package - if not result: - result = { - self.naming.proto_package: client_pb2.ClientLibrarySettings( - version=self.naming.proto_package + # NOTE: Add default settings for the current proto package + # for the following cases: + # - if library settings are not specified in the service config. + # - if library_settings.version != self.naming.proto_package (proto package name) + if self.naming.proto_package not in result: + result[self.naming.proto_package] = client_pb2.ClientLibrarySettings( + version=self.naming.proto_package ) - } return result diff --git a/gapic/schema/metadata.py b/gapic/schema/metadata.py index dc9389e8f5..480cde40cd 100644 --- a/gapic/schema/metadata.py +++ b/gapic/schema/metadata.py @@ -173,6 +173,11 @@ def proto_package(self) -> str: """Return the proto package for this type.""" return '.'.join(self.package) + @property + def proto_package_versioned(self) -> str: + """Return the versioned proto package for this type.""" + return ".".join(self.convert_to_versioned_package()) + def convert_to_versioned_package(self) -> Tuple[str, ...]: # We need to change the import statement to use an # underscore between the module and the version. For example, diff --git a/gapic/schema/wrappers.py b/gapic/schema/wrappers.py index fd75e8fd5e..473b56e7eb 100644 --- a/gapic/schema/wrappers.py +++ b/gapic/schema/wrappers.py @@ -30,6 +30,7 @@ import collections import copy import dataclasses +import functools import json import keyword import re @@ -1035,10 +1036,20 @@ def _to_regex(self, path_template: str) -> Pattern: """ return re.compile(f"^{self._convert_to_regex(path_template)}$") + # Use caching to avoid repeated computation + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2161): + # Use `@functools.cache` instead of `@functools.lru_cache` once python 3.8 is dropped. + # https://docs.python.org/3/library/functools.html#functools.cache + @functools.lru_cache(maxsize=None) def to_regex(self) -> Pattern: return self._to_regex(self.path_template) @property + # Use caching to avoid repeated computation + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2161): + # Use `@functools.cache` instead of `@functools.lru_cache` once python 3.8 is dropped. + # https://docs.python.org/3/library/functools.html#functools.cache + @functools.lru_cache(maxsize=None) def key(self) -> Union[str, None]: if self.path_template == "": return self.field @@ -1067,6 +1078,69 @@ def try_parse_routing_rule(cls, routing_rule: routing_pb2.RoutingRule) -> Option params = [RoutingParameter(x.field, x.path_template) for x in params] return cls(params) + @classmethod + def resolve(cls, routing_rule: routing_pb2.RoutingRule, request: Union[dict, str]) -> dict: + """Resolves the routing header which should be sent along with the request. + The routing header is determined based on the given routing rule and request. + See the following link for more information on explicit routing headers: + https://google.aip.dev/client-libraries/4222#explicit-routing-headers-googleapirouting + + Args: + routing_rule(routing_pb2.RoutingRule): A collection of Routing Parameter specifications + defined by `routing_pb2.RoutingRule`. 
+ See https://github.com/googleapis/googleapis/blob/cb39bdd75da491466f6c92bc73cd46b0fbd6ba9a/google/api/routing.proto#L391 + request(Union[dict, str]): The request for which the routing rule should be resolved. + The format can be either a dictionary or a JSON string representing the request. + + Returns(dict): + A dictionary containing the resolved routing header to be sent along with the given request. + """ + + def _get_field(request, field_path: str): + segments = field_path.split(".") + + # Either json string or dictionary is supported + if isinstance(request, str): + current = json.loads(request) + else: + current = request + + # This is to cater for the case where the `field_path` contains a + # dot-separated path of field names leading to a field in a sub-message. + for x in segments: + current = current.get(x, None) + # Break if the sub-message does not exist + if current is None: + break + return current + + header_params = {} + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2160): Move this logic to + # `google-api-core` so that the shared code can be used in both `wrappers.py` and GAPIC clients + # via Jinja templates. + for routing_param in routing_rule.routing_parameters: + request_field_value = _get_field(request, routing_param.field) + # Only resolve the header for routing parameter fields which are populated in the request + if request_field_value is not None: + # If there is a path_template for a given routing parameter field, the value of the field must match the template. + # If multiple `routing_param`s describe the same key + # (via the `path_template` field or via the `field` field when + # `path_template` is not provided), the "last one wins" rule + # determines which parameter gets used. See https://google.aip.dev/client-libraries/4222. + routing_parameter_key = routing_param.key + if routing_param.path_template: + routing_param_regex = routing_param.to_regex() + regex_match = routing_param_regex.match( + request_field_value + ) + if regex_match: + header_params[routing_parameter_key] = regex_match.group( + routing_parameter_key + ) + else: # No need to match + header_params[routing_parameter_key] = request_field_value + return header_params + @dataclasses.dataclass(frozen=True) class HttpRule: diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 index 750bd734ab..60b8f0a7e9 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 @@ -9,7 +9,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -20,8 +20,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. 
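For context on the RoutingRule.resolve helper added to gapic/schema/wrappers.py above: its nested _get_field walks a dot-separated field path through the request, and the surrounding loop applies AIP-4222 "last one wins" semantics per header key. A self-contained sketch of that field lookup, assuming plain-dict or JSON-string requests (get_field is an illustrative standalone version of the nested helper, not the real API):

import json
from typing import Union

def get_field(request: Union[dict, str], field_path: str):
    # Either a JSON string or a dictionary is supported, as in resolve().
    current = json.loads(request) if isinstance(request, str) else request
    for segment in field_path.split("."):
        current = current.get(segment, None)
        if current is None:
            break  # sub-message absent; nothing to route on
    return current

request = {"table_name": "projects/p/instances/i/tables/t",
           "read_options": {"app_profile_id": "profile-1"}}
assert get_field(request, "read_options.app_profile_id") == "profile-1"
assert get_field(request, "read_options.missing") is None
assert get_field(json.dumps(request), "table_name") == "projects/p/instances/i/tables/t"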
@@ -34,11 +33,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -66,7 +61,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -77,8 +72,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -91,11 +85,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -122,7 +112,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> None: r"""Deletes a long-running operation. @@ -138,8 +128,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: None """ @@ -151,11 +140,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -178,7 +163,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -193,8 +178,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: None """ @@ -206,11 +190,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -233,7 +213,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> operations_pb2.Operation: r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. @@ -250,8 +230,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -264,11 +243,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.wait_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.wait_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -300,7 +275,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -313,8 +288,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -391,11 +365,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -422,7 +392,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -436,8 +406,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -514,11 +483,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -545,7 +510,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. @@ -560,8 +525,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. @@ -575,11 +539,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. @@ -610,7 +570,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> locations_pb2.Location: r"""Gets information about a location. @@ -621,8 +581,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.location_pb2.Location: Location object. @@ -635,11 +594,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. 
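Each mixin hunk in this file swaps a per-call gapic_v1.method.wrap_method(...) for a lookup into the transport's precomputed _wrapped_methods table, so retry, timeout, and client-info wrapping happens once at transport construction instead of on every invocation. A rough sketch of the pattern these lookups assume (SketchTransport is illustrative only; generated transports build the dict from templates):

from google.api_core import gapic_v1

class SketchTransport:
    def __init__(self, client_info=None):
        client_info = client_info or gapic_v1.client_info.ClientInfo()
        # Wrap each RPC exactly once; bound methods hash consistently, so
        # callers can later fetch the wrapped callable via
        # self._wrapped_methods[self.get_operation].
        self._wrapped_methods = {
            self.get_operation: gapic_v1.method.wrap_method(
                self.get_operation,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def get_operation(self, request, **kwargs):
        ...  # would issue the underlying RPC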
@@ -666,7 +621,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -677,8 +632,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. @@ -691,11 +645,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index e741d6d047..97fa01773c 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -30,7 +30,7 @@ {% endif %} retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, {% if method.extended_lro and not full_extended_lro %}{# This is a hack to preserve backwards compatibility with the "unary" surfaces #} ) -> {{ method.extended_lro.operation_type.ident }}: {% elif not method.server_streaming %} @@ -68,8 +68,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} {% if not method.void %} Returns: @@ -144,45 +143,7 @@ # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.{{ method.transport_safe_name|snake_case}}] - {% if method.explicit_routing %} - header_params = {} - {% if not method.client_streaming %} - {% for routing_param in method.routing_rule.routing_parameters %} - {% if routing_param.path_template %} {# Need to match. 
#} - - routing_param_regex = {{ routing_param.to_regex() }} - regex_match = routing_param_regex.match(request.{{ routing_param.field }}) - if regex_match and regex_match.group("{{ routing_param.key }}"): - header_params["{{ routing_param.key }}"] = regex_match.group("{{ routing_param.key }}") - - {% else %} - - if request.{{ routing_param.field }}: - header_params["{{ routing_param.key }}"] = request.{{ routing_param.field }} - - {% endif %} - {% endfor %} {# method.routing_rule.routing_parameters #} - {% endif %} {# if not method.client_streaming #} - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - {% elif method.field_headers %} {# implicit routing #} - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - {% for field_header in method.field_headers %} - {% if not method.client_streaming %} - ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), - {% endif %} - {% endfor %} - )), - ) - {% endif %} {# method.explicit_routing #} - +{{ shared_macros.create_metadata(method) }} {{ shared_macros.add_api_version_header_to_metadata(service.version) }} {{ shared_macros.auto_populate_uuid4_fields(api, method) }} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 index ef8137dc3b..4bfc76e954 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 @@ -7,7 +7,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -18,8 +18,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -32,11 +31,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -68,7 +63,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -79,8 +74,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -93,11 +87,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -128,7 +118,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> None: r"""Deletes a long-running operation. @@ -144,8 +134,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: None """ @@ -157,11 +146,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -184,7 +169,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -199,8 +184,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: None """ @@ -212,11 +196,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -239,7 +219,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> operations_pb2.Operation: r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. @@ -256,8 +236,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -270,11 +249,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.wait_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.wait_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -310,7 +285,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -323,8 +298,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -401,11 +375,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -436,7 +406,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -450,8 +420,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -528,11 +497,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -563,7 +528,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. 
@@ -578,8 +543,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. @@ -593,11 +557,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. @@ -632,7 +592,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> locations_pb2.Location: r"""Gets information about a location. @@ -643,8 +603,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.location_pb2.Location: Location object. @@ -657,11 +616,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. @@ -692,7 +647,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -703,8 +658,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. @@ -717,11 +671,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. 
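Every metadata parameter and docstring rewritten above is rendered through the shared client_method_metadata_argument() and client_method_metadata_argument_doc() macros. Judging from the parallel samplegen change earlier in this diff (Sequence[Tuple[str, Union[str, bytes]]]), the macros widen the metadata value type so binary values type-check alongside strings; a hedged sketch of the presumed rendered signature (the function below is illustrative, not generated output):

from typing import Optional, Sequence, Tuple, Union

def list_operations(
    request: Optional[dict] = None,
    *,
    metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
    """Illustration only: metadata values may now be str or bytes."""

# Both of these now type-check:
list_operations(metadata=[("x-goog-request-params", "name=projects/p")])
list_operations(metadata=[("proto-bin", b"\x08\x01")])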
diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index b7b223caad..e96594c018 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -50,11 +50,53 @@ The `try/except` below can be removed once the minimum version of try: from google.api_core import version_header HAS_GOOGLE_API_CORE_VERSION_HEADER = True # pragma: NO COVER +{# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} except ImportError: # pragma: NO COVER HAS_GOOGLE_API_CORE_VERSION_HEADER = False {% endif %}{# service_version #} {% endmacro %} +{% macro create_metadata(method) %} + {% if method.explicit_routing %} + header_params = {} + {% if not method.client_streaming %} + {% for routing_param in method.routing_rule.routing_parameters %} + {% if routing_param.path_template %} {# Need to match. #} + + routing_param_regex = {{ routing_param.to_regex() }} + regex_match = routing_param_regex.match(request.{{ routing_param.field }}) + if regex_match and regex_match.group("{{ routing_param.key }}"): + header_params["{{ routing_param.key }}"] = regex_match.group("{{ routing_param.key }}") + + {% else %} + + if request.{{ routing_param.field }}: + header_params["{{ routing_param.key }}"] = request.{{ routing_param.field }} + + {% endif %} + {% endfor %} {# method.routing_rule.routing_parameters #} + {% endif %} {# if not method.client_streaming #} + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + {% elif method.field_headers %}{# implicit routing #} + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + {% if not method.client_streaming %} + {% for field_header in method.field_headers %} + ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), + {% endfor %}{# for field_header in method.field_headers #} + {% endif %}{# not method.client_streaming #} + )), + ) + {% endif %}{# method.explicit_routing #} +{% endmacro %}{# create_metadata #} + {% macro add_api_version_header_to_metadata(service_version) %} {# Add API Version to metadata as per https://github.com/aip-dev/google.aip.dev/pull/1331. @@ -110,7 +152,7 @@ def _get_http_options(): return http_options {% endmacro %} -{% macro response_method(body_spec, is_async=False) %} +{% macro response_method(body_spec, is_async=False, is_streaming_method=False) %} {% set async_prefix = "async " if is_async else "" %} {% set await_prefix = "await " if is_async else "" %} @staticmethod @@ -135,6 +177,14 @@ def _get_http_options(): {% if body_spec %} data=body, {% endif %} + {% if not is_async and is_streaming_method %} + {# NOTE: The underlying `requests` library used for making a sync request + # requires us to set `stream=True` to avoid loading the entire response + # into memory at once. For an async request, given its nature where it + # reads data chunk by chunk, this is not required. + #} + stream=True, + {% endif %} ) return response {% endmacro %} @@ -145,18 +195,16 @@ def _get_http_options(): Args: body_spec (str): The http options body i.e. method.http_options[0].body method_name (str): The method name. - service_name (str): The service name. + service: The service. 
is_async (bool): Used to determine the code path i.e. whether for sync or async call. #} -{% macro rest_call_method_common(body_spec, method_name, service_name, is_async=False) %} +{% macro rest_call_method_common(body_spec, method_name, service, is_async=False, is_proto_plus_type=False) %} +{% set service_name = service.name %} {% set await_prefix = "await " if is_async else "" %} {% set async_class_prefix = "Async" if is_async else "" %} http_options = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_http_options() - {% if not is_async %} - {# TODO (ohmayr): Make this unconditional once REST interceptors are supported for async. Googlers, - see internal tracking issue: b/362949568. #} - request, metadata = self._interceptor.pre_{{ method_name|snake_case }}(request, metadata) - {% endif %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2274): Add debug log before intercepting a request #} + request, metadata = {{ await_prefix }}self._interceptor.pre_{{ method_name|snake_case }}(request, metadata) transcoded_request = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_transcoded_request(http_options, request) {% if body_spec %} @@ -166,6 +214,32 @@ def _get_http_options(): # Jsonify the query params query_params = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = {% if is_proto_plus_type %}type(request).to_json(request){% else %}json_format.MessageToJson(request){% endif %} + + except: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2282): Remove try/except and correctly parse request payload. #} + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for {{ service.meta.address.proto_package_versioned }}.{{ service.client_name }}.{{ method_name }}", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": "{{ method_name }}", + "httpRequest": http_request, + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up #} + "metadata": http_request["headers"], + }, + ) + # Send the request response = {{ await_prefix }}{{ async_class_prefix }}{{ service_name }}RestTransport._{{method_name}}._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request{% if body_spec %}, body{% endif %}) @@ -181,7 +255,8 @@ def _get_http_options(): payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2137): Remove `type: ignore` once version check is added for google-api-core. 
#} + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore {% else %} raise core_exceptions.from_http_response(response) {% endif %}{# is_async #} @@ -189,7 +264,7 @@ def _get_http_options(): {% endmacro %} -{% macro prep_wrapped_messages_async_method(service) %} +{% macro prep_wrapped_messages_async_method(api, service) %} def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -219,6 +294,16 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), {% endfor %}{# service.methods.values() #} + {% for method_name in api.mixin_api_methods.keys() %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2197): Use `transport_safe_name` similar + # to what we do for non-mixin methods above. + #} + self.{{ method_name|snake_case }}: self._wrap_method( + self.{{ method_name|snake_case }}, + default_timeout=None, + client_info=client_info, + ), + {% endfor %}{# method_name in api.mixin_api_methods.keys() #} } {% endmacro %} @@ -237,3 +322,215 @@ def _wrap_method(self, func, *args, **kwargs): kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) {% endmacro %} + +{# `create_interceptor_class` generates an Interceptor class for + # synchronous and asynchronous rest transports +#} +{% macro create_interceptor_class(api, service, method, is_async=False) %} +{% set async_prefix = "async " if is_async else "" %} +{% set async_method_name_prefix = "Async" if is_async else "" %} +{% set async_docstring = "Asynchronous " if is_async else "" %} +{% set async_suffix = "_async" if is_async else "" %} + +class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor: + """{{ async_docstring }}Interceptor for {{ service.name }}. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the {{ async_method_name_prefix }}{{ service.name }}RestTransport. + + .. code-block:: python + class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): + {% for _, method in service.methods|dictsort if not method.client_streaming %} + {{ async_prefix }}def pre_{{ method.name|snake_case }}(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + {% if not method.void %} + {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response): + logging.log(f"Received response: {response}") + return response + + {% endif %} + + {% endfor %} + transport = {{ async_method_name_prefix }}{{ service.name }}RestTransport(interceptor=MyCustom{{ service.name }}Interceptor()) + client = {{ async_prefix }}{{ service.client_name }}(transport=transport) + + + """ + {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming and method.http_options %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2147): Remove the condition below once async rest transport supports the guarded methods. 
#} + {{ async_prefix }}def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, {{ client_method_metadata_argument() }}) -> Tuple[{{method.input.ident}}, {{ client_method_metadata_type() }}]: + """Pre-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to manipulate the request or metadata + before they are sent to the {{ service.name }} server. + """ + return request, metadata + + {% if not method.void %} + {% if not method.server_streaming %} + {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}: + {% else %} + {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response: rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator) -> rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator: + {% endif %} + """Post-rpc interceptor for {{ method.name|snake_case }} + + DEPRECATED. Please use the `post_{{ method.name|snake_case }}_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the {{ service.name }} server but before + it is returned to user code. This `post_{{ method.name|snake_case }}` interceptor runs + before the `post_{{ method.name|snake_case }}_with_metadata` interceptor. + """ + return response + + {% if not method.server_streaming %} + {{ async_prefix }}def post_{{ method.name|snake_case }}_with_metadata(self, response: {{method.output.ident}}, {{ client_method_metadata_argument() }}) -> Tuple[{{method.output.ident}}, {{ client_method_metadata_type() }}]: + {% else %} + {{ async_prefix }}def post_{{ method.name|snake_case }}_with_metadata(self, response: rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator, {{ client_method_metadata_argument() }}) -> Tuple[rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator, {{ client_method_metadata_type() }}]: + {% endif %} + """Post-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the {{ service.name }} server but before it is returned to user code. + + We recommend only using this `post_{{ method.name|snake_case }}_with_metadata` + interceptor in new development instead of the `post_{{ method.name|snake_case }}` interceptor. + When both interceptors are used, this `post_{{ method.name|snake_case }}_with_metadata` interceptor runs after the + `post_{{ method.name|snake_case }}` interceptor. The (possibly modified) response returned by + `post_{{ method.name|snake_case }}` will be passed to + `post_{{ method.name|snake_case }}_with_metadata`. + """ + return response, metadata + + {% endif %}{# not method.void #} + {% endfor %} + + {% for name, signature in api.mixin_api_signatures.items() %} + {{ async_prefix }}def pre_{{ name|snake_case }}( + self, request: {{signature.request_type}}, {{ client_method_metadata_argument() }} + ) -> Tuple[{{signature.request_type}}, {{ client_method_metadata_type() }}]: + """Pre-rpc interceptor for {{ name|snake_case }} + + Override in a subclass to manipulate the request or metadata + before they are sent to the {{ service.name }} server. 
+ """ + return request, metadata + + {{ async_prefix }}def post_{{ name|snake_case }}( + self, response: {{signature.response_type}} + ) -> {{signature.response_type}}: + """Post-rpc interceptor for {{ name|snake_case }} + + Override in a subclass to manipulate the response + after it is returned by the {{ service.name }} server but before + it is returned to user code. + """ + return response + + {% endfor %} +{% endmacro %} + +{% macro generate_mixin_call_method(service, api, name, sig, is_async) %} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2198): generate _Mixin classes + # and @property methods into separate macros so that _Method and _Mixin classes can be defined all + # together and the @property methods for each can be defined after the class definitions. +#} +{% set async_prefix = "async " if is_async else "" %} +{% set async_method_name_prefix = "Async" if is_async else "" %} +{% set async_suffix = "_async" if is_async else "" %} +{% set await_prefix = "await " if is_async else "" %} + +@property +def {{ name|snake_case }}(self): + return self._{{ name }}(self._session, self._host, self._interceptor) # type: ignore + +class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_method_name_prefix }}{{service.name}}RestStub): + def __hash__(self): + return hash("{{ async_method_name_prefix }}{{ service.name }}RestTransport.{{ name }}") + + {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} + {{ response_method(body_spec, is_async=is_async, is_streaming_method=None) | indent(4) }} + + {{ async_prefix }}def __call__(self, + request: {{ sig.request_type }}, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + {{ client_method_metadata_argument()|indent(8) }}={{ client_method_metadata_default_value() }}, + ) -> {{ sig.response_type }}: + + r"""Call the {{- ' ' -}} + {{ (name|snake_case).replace('_',' ')|wrap(width=70, offset=45, indent=8) }} + {{- ' ' -}} method over HTTP. + + Args: + request ({{ sig.request_type }}): + The request object for {{ name }} method. + retry (google.api_core.retry{{ async_suffix }}.{{ async_method_name_prefix }}Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + {{ client_method_metadata_argument_doc()|indent(4) }} + {% if sig.response_type != 'None' %} + + Returns: + {{ sig.response_type }}: Response from {{ name }} method. + {% endif %} + """ + {{ rest_call_method_common(body_spec, name, service, is_async)|indent(4) }} + + {% if sig.response_type == "None" %} + return {{ await_prefix }}self._interceptor.post_{{ name|snake_case }}(None) + {% else %} + {% if is_async %} + content = await response.read() + {% else %} + content = response.content.decode("utf-8") + {% endif %} + resp = {{ sig.response_type }}() + resp = json_format.Parse(content, resp) + resp = {{ await_prefix }}self._interceptor.post_{{ name|snake_case }}(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2283): Remove try/except once unit tests are updated. 
#} + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for {{ service.meta.address.proto_package_versioned }}.{{ service.async_client_name }}.{{ name }}", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": "{{ name }}", + "httpResponse": http_response, + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up #} + "metadata": http_response["headers"], + }, + ) + return resp + {% endif %} + +{% endmacro %} + +{% macro client_method_metadata_argument() %}metadata: {{ client_method_metadata_type() }}{% endmacro %} + +{% macro client_method_metadata_type() %}Sequence[Tuple[str, Union[str, bytes]]]{% endmacro %} + +{% macro client_method_metadata_default_value() %}(){% endmacro %} + +{% macro client_method_metadata_argument_doc() %}metadata ({{ client_method_metadata_type() }}): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`.{% endmacro %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 36a40f4db0..2513ce10c0 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -4,6 +4,7 @@ {% import "%namespace/%name_%version/%sub/services/%service/_client_macros.j2" as macros %} {% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union @@ -52,6 +53,13 @@ from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} from .client import {{ service.client_name }} +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) {# TODO(yon-mg): handle rest transport async client interaction #} class {{ service.async_client_name }}: @@ -243,6 +251,20 @@ class {{ service.async_client_name }}: ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `{{ service.meta.address.proto_package_versioned }}.{{ service.async_client_name }}`.", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "{{ 
service.meta.address.proto }}", + "credentialsType": None, + } + ) + {% for method in service.methods.values() %} {% with method_name = method.safe_name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} {%+ if not method.server_streaming %}async {% endif %}def {{ method_name }}(self, @@ -259,7 +281,7 @@ class {{ service.async_client_name }}: {% endif %} retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, {% if not method.server_streaming %} ) -> {{ method.client_output_async.ident }}: {% else %} @@ -295,8 +317,7 @@ class {{ service.async_client_name }}: retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} {% if not method.void %} Returns: @@ -364,20 +385,7 @@ class {{ service.async_client_name }}: # and friendly error handling. rpc = self._client._transport._wrapped_methods[self._client._transport.{{ method.transport_safe_name|snake_case }}] - {% if method.field_headers %} - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - {% for field_header in method.field_headers %} - {% if not method.client_streaming %} - ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), - {% endif %} - {% endfor %} - )), - ) - {% endif %} - +{{ shared_macros.create_metadata(method) }} {{ shared_macros.add_api_version_header_to_metadata(service.version) }} {{ shared_macros.auto_populate_uuid4_fields(api, method) }} @@ -436,7 +444,7 @@ class {{ service.async_client_name }}: *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -449,8 +457,7 @@ class {{ service.async_client_name }}: retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -526,11 +533,7 @@ class {{ service.async_client_name }}: # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. 
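The `_LOGGER.debug(..., extra={...})` calls introduced above lean on stdlib behavior: every key in the `extra` mapping is copied onto the emitted `LogRecord` as an attribute. A small sketch of consuming those structured fields; the filter and logger names are illustrative, not generated code:

import logging

class StructuredFieldsFilter(logging.Filter):
    def filter(self, record: logging.LogRecord) -> bool:
        # Fields passed via `extra` surface as plain attributes on the record.
        service = getattr(record, "serviceName", None)
        rpc = getattr(record, "rpcName", None)
        if service or rpc:
            record.msg = f"[{service}/{rpc}] {record.msg}"
        return True

logger = logging.getLogger("example.client")
handler = logging.StreamHandler()
handler.addFilter(StructuredFieldsFilter())
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)

logger.debug(
    "Sending request",
    extra={"serviceName": "google.example.v1.Echo", "rpcName": "Echo"},
)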
@@ -553,7 +556,7 @@ class {{ service.async_client_name }}: *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -567,8 +570,7 @@ class {{ service.async_client_name }}: retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -645,11 +647,7 @@ class {{ service.async_client_name }}: # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -672,7 +670,7 @@ class {{ service.async_client_name }}: *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control policy for a function. @@ -687,8 +685,7 @@ class {{ service.async_client_name }}: retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~iam_policy_pb2.PolicyTestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. @@ -702,11 +699,7 @@ class {{ service.async_client_name }}: # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. 
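The `{{ shared_macros.create_metadata(method) }}` call that replaces the inline field-header block renders, for explicit routing, to code along these lines: each routing parameter's path template is compiled to a regex with a named group, matched against the request field, and any hits are appended to the call metadata as an `x-goog-request-params` header. A hand-written approximation; the `table_name` field and its template are example values, not taken from this diff:

import re

from google.api_core import gapic_v1

def build_routing_metadata(request_name: str, metadata=()):
    header_params = {}

    # e.g. a routing parameter `table_name` with path template `projects/*/tables/**`
    routing_param_regex = re.compile(r"^(?P<table_name>projects/[^/]+/tables/.*)$")
    regex_match = routing_param_regex.match(request_name)
    if regex_match and regex_match.group("table_name"):
        header_params["table_name"] = regex_match.group("table_name")

    if header_params:
        # to_grpc_metadata returns an ("x-goog-request-params", <url-encoded>) pair.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(header_params),
        )
    return metadata

print(build_routing_metadata("projects/p/tables/t"))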
diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 941bf3eaf4..446c82acca 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -12,6 +12,7 @@ import functools {% endif %} from http import HTTPStatus import json +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast @@ -41,6 +42,14 @@ try: except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + {% filter sort_lines %} {% for method in service.methods.values() %} {% for ref_type in method.flat_ref_types %} @@ -68,7 +77,14 @@ from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} from .transports.rest import {{ service.name }}RestTransport {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} {% if rest_async_io_enabled %} -from .transports.rest_asyncio import Async{{ service.name }}RestTransport +try: + from .transports.rest_asyncio import Async{{ service.name }}RestTransport + HAS_ASYNC_REST_DEPENDENCIES = True +{# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} +except ImportError as e: # pragma: NO COVER + HAS_ASYNC_REST_DEPENDENCIES = False + ASYNC_REST_EXCEPTION = e + {% endif %}{# if rest_async_io_enabled #} {% endif %} @@ -89,7 +105,8 @@ class {{ service.client_name }}Meta(type): _transport_registry["rest"] = {{ service.name }}RestTransport {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} {% if rest_async_io_enabled %} - _transport_registry["rest_asyncio"] = Async{{ service.name }}RestTransport + if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + _transport_registry["rest_asyncio"] = Async{{ service.name }}RestTransport {% endif %}{# if rest_async_io_enabled #} {% endif %} @@ -106,6 +123,12 @@ class {{ service.client_name }}Meta(type): The transport class to use. """ # If a specific transport is requested, return that one. + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} + {% if rest_async_io_enabled %} + {# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. 
#} + if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + raise ASYNC_REST_EXCEPTION + {% endif %} if label: return cls._transport_registry[label] @@ -388,33 +411,6 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if len(universe_domain.strip()) == 0: raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = {{ service.client_name }}._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -425,9 +421,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - {{ service.client_name }}._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True def _add_cred_info_for_auth_errors( self, @@ -545,6 +541,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -574,16 +574,51 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): self._use_mtls_endpoint)) if not transport_provided: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. 
#} + {% if rest_async_io_enabled %} + transport_init: Union[Type[{{ service.name }}Transport], Callable[..., {{ service.name }}Transport]] = ( + {{ service.client_name }}.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., {{ service.name }}Transport], transport) + ) + + if "rest_asyncio" in str(transport_init): + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2136): Support the following parameters in async rest: #} + unsupported_params = { + "google.api_core.client_options.ClientOptions.credentials_file": self._client_options.credentials_file, + "google.api_core.client_options.ClientOptions.scopes": self._client_options.scopes, + "google.api_core.client_options.ClientOptions.quota_project_id": self._client_options.quota_project_id, + "google.api_core.client_options.ClientOptions.client_cert_source": self._client_options.client_cert_source, + "google.api_core.client_options.ClientOptions.api_audience": self._client_options.api_audience, + + } + provided_unsupported_params = [name for name, value in unsupported_params.items() if value is not None] + if provided_unsupported_params: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2137): Remove `type: ignore` once we add a version check for google-api-core. #} + raise core_exceptions.AsyncRestUnsupportedParameterError( # type: ignore + f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}" + ) + self._transport = transport_init( + credentials=credentials, + host=self._api_endpoint, + client_info=client_info, + ) + return + + {% endif %} import google.auth._default # type: ignore if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. 
#} + {% if not rest_async_io_enabled %} transport_init: Union[Type[{{ service.name }}Transport], Callable[..., {{ service.name }}Transport]] = ( {{ service.client_name }}.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., {{ service.name }}Transport], transport) ) + {% endif %} # initialize with the provided callable or the passed in class self._transport = transport_init( credentials=credentials, @@ -596,6 +631,21 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): always_use_jwt_access=True, api_audience=self._client_options.api_audience, ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `{{ service.meta.address.proto_package_versioned }}.{{ service.client_name }}`.", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "{{ service.meta.address.proto }}", + "credentialsType": None, + } + ) {% for method in service.methods.values() %} @@ -631,7 +681,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -644,8 +694,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -755,7 +804,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -769,8 +818,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. 
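The `metadata` parameter in all of these signatures is retyped from `Sequence[Tuple[str, str]]` to the macro-generated `Sequence[Tuple[str, Union[str, bytes]]]`, matching gRPC's binary-metadata convention that the new docstring macro spells out: keys ending in `-bin` carry `bytes` values, every other key carries `str`. For example:

from typing import Sequence, Tuple, Union

MetadataType = Sequence[Tuple[str, Union[str, bytes]]]

metadata: MetadataType = [
    ("x-goog-example", "text-value"),         # ordinary key: str value
    ("x-goog-example-bin", b"\x00\x01\x02"),  # "-bin" suffix: bytes value
]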
@@ -880,7 +928,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. @@ -895,8 +943,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index 796cefe48c..0d7b1dad8b 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -1,3 +1,4 @@ +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} {% extends '_base.py.j2' %} {% block content %} @@ -54,7 +55,7 @@ class {{ method.name }}Pager: *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + {{ shared_macros.client_method_metadata_argument()|indent(4) }} = {{ shared_macros.client_method_metadata_default_value() }}): """Instantiate the pager. Args: @@ -67,8 +68,7 @@ class {{ method.name }}Pager: retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} """ self._method = method self._request = {{ method.input.ident }}(request) @@ -131,7 +131,7 @@ class {{ method.name }}AsyncPager: *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + {{ shared_macros.client_method_metadata_argument()|indent(4) }} = {{ shared_macros.client_method_metadata_default_value() }}): """Instantiates the pager. Args: @@ -144,8 +144,7 @@ class {{ method.name }}AsyncPager: retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} """ self._method = method self._request = {{ method.input.ident }}(request) diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 index 66be2e5c29..9745b08d78 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 @@ -1,9 +1,11 @@ +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove the following variable (and the condition later in this file) for async rest transport once support for it is GA. #} +{% set rest_async_io_enabled = api.all_library_settings[api.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled %} {% extends '_base.py.j2' %} {% block content %} from collections import OrderedDict -from typing import Dict, Type +from typing import Dict, Type{% if rest_async_io_enabled %}, Tuple{% endif +%} from .base import {{ service.name }}Transport {% if 'grpc' in opts.transport %} @@ -13,6 +15,17 @@ from .grpc_asyncio import {{ service.name }}GrpcAsyncIOTransport {% if 'rest' in opts.transport %} from .rest import {{ service.name }}RestTransport from .rest import {{ service.name }}RestInterceptor +{% if rest_async_io_enabled %} +ASYNC_REST_CLASSES: Tuple[str, ...] +try: + from .rest_asyncio import Async{{ service.name }}RestTransport + from .rest_asyncio import Async{{ service.name }}RestInterceptor + ASYNC_REST_CLASSES = ('Async{{ service.name }}RestTransport', 'Async{{ service.name }}RestInterceptor') + HAS_REST_ASYNC = True +except ImportError: # pragma: NO COVER + ASYNC_REST_CLASSES = () + HAS_REST_ASYNC = False +{% endif %}{# if rest_async_io_enabled #} {% endif %} @@ -25,6 +38,10 @@ _transport_registry['grpc_asyncio'] = {{ service.name }}GrpcAsyncIOTransport {% endif %} {% if 'rest' in opts.transport %} _transport_registry['rest'] = {{ service.name }}RestTransport +{% if rest_async_io_enabled %} +if HAS_REST_ASYNC: # pragma: NO COVER + _transport_registry['rest_asyncio'] = Async{{ service.name }}RestTransport +{% endif %}{# if rest_async_io_enabled #} {% endif %} __all__ = ( @@ -37,5 +54,5 @@ __all__ = ( '{{ service.name }}RestTransport', '{{ service.name }}RestInterceptor', {% endif %} -) +){% if 'rest' in opts.transport and rest_async_io_enabled%} + ASYNC_REST_CLASSES{%endif%} {% endblock %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 index 1867d0ac8c..3458cc78e8 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 @@ -32,7 +32,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -52,7 +52,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -71,8 +71,8 @@ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["wait_operation"] = self.grpc_channel.unary_unary( + if "wait_operation" not in self._stubs: + self._stubs["wait_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/WaitOperation", request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, response_deserializer=None, @@ -92,7 +92,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -112,7 +112,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -136,7 +136,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -156,7 +156,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, @@ -188,7 +188,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -216,7 +216,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -246,7 +246,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 index f01102a18f..691b98f0ef 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 @@ -17,53 +17,7 @@ {% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} {% if "rest" in opts.transport %} - {% for name, sig in api.mixin_api_signatures.items() %} - @property - def {{ name|snake_case }}(self): - return self._{{ name }}(self._session, self._host, self._interceptor) # type: ignore - - class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{service.name}}RestStub): - {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} - {{ shared_macros.response_method(body_spec)|indent(8) }} - - def __call__(self, - request: {{ sig.request_type }}, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> {{ sig.response_type }}: - - r"""Call the {{- ' ' -}} - {{ (name|snake_case).replace('_',' ')|wrap( - width=70, offset=45, indent=8) }} - {{- ' ' -}} method over HTTP. - - Args: - request ({{ sig.request_type }}): - The request object for {{ name }} method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - {% if sig.response_type != 'None' %} - - Returns: - {{ sig.response_type }}: Response from {{ name }} method. 
- {% endif %} """ - {{ shared_macros.rest_call_method_common(body_spec, name, service.name)|indent(8) }} - - {% if sig.response_type == "None" %} - return self._interceptor.post_{{ name|snake_case }}(None) - {% else %} - - resp = {{ sig.response_type }}() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_{{ name|snake_case }}(resp) - return resp - {% endif %} - + {{ shared_macros.generate_mixin_call_method(service, api, name, sig, is_async=False) | indent(4) }} {% endfor %} {% endif %} {# rest in opts.transport #} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 index 2fd6d8bafe..16cc77ea93 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 @@ -20,7 +20,9 @@ {% for name, sig in api.mixin_api_signatures.items() %} class _Base{{ name }}: - + def __hash__(self): # pragma: NO COVER + raise NotImplementedError("__hash__ must be implemented.") + {{ shared_macros.http_options_method(api.mixin_http_options["{}".format(name)])|indent(8)}} @staticmethod diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index fc4b16f6c8..e75d03a761 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -171,7 +171,14 @@ class {{ service.name }}Transport(abc.ABC): default_timeout={{ method.timeout }}, client_info=client_info, ), - {% endfor %} {# precomputed wrappers loop #} + {% endfor %}{# method in service.methods.values() #} + {% for method_name in api.mixin_api_methods.keys() %} + self.{{ method_name|snake_case }}: gapic_v1.method.wrap_method( + self.{{ method_name|snake_case }}, + default_timeout=None, + client_info=client_info, + ), + {% endfor %} {# method_name in api.mixin_api_methods.keys() #} } def close(self): diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 69b92f162e..cb3ee8cfa6 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -2,6 +2,9 @@ {% block content %} +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -13,8 +16,11 @@ from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore {% filter sort_lines %} {% set import_ns = namespace(has_operations_mixin=false) %} @@ -42,6 +48,82 @@ from google.longrunning import operations_pb2 # type: ignore {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type:
ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2293): Investigate if we can improve this logic + or wait for next gen protobuf. + #} + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": client_call_details.method, + "request": grpc_request, + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up. We're including it within "request" for consistency with REST transport. #} + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2293): Investigate if we can improve this logic + or wait for next gen protobuf. + #} + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": client_call_details.method, + "response": grpc_response, + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up. We're including it within "request" for consistency with REST transport. #} + "metadata": grpc_response["metadata"], + }, + ) + return response + class {{ service.name }}GrpcTransport({{ service.name }}Transport): """gRPC backend transport for {{ service.name }}. @@ -195,7 +277,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages.
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -262,7 +347,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -292,7 +377,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if '{{ method.transport_safe_name|snake_case }}' not in self._stubs: - self._stubs['{{ method.transport_safe_name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( + self._stubs['{{ method.transport_safe_name|snake_case }}'] = self._logged_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', request_serializer={{ method.input.ident }}.{% if method.input.ident.python_import.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, @@ -320,7 +405,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -346,7 +431,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -374,7 +459,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, @@ -383,7 +468,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): {% endif %} def close(self): - self.grpc_channel.close() + self._logged_channel.close() {% include '%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2' %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 8f26b936b1..650f4a2c65 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -4,6 +4,9 @@ {% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -16,8 +19,11 @@ from google.api_core import operations_v1 {% endif %} from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore {% filter sort_lines %} @@ -47,6 +53,81 @@ from google.longrunning import operations_pb2 # type: ignore from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .grpc import {{ service.name }}GrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2293): Investigate if we can improve this logic + or wait for next gen protobuf. 
+ #}
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra = {
+ "serviceName": "{{ service.meta.address.proto }}",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up. We're including it within "request" for consistency with REST transport. #}
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = await continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = await response.trailing_metadata()
+ # Convert gRPC metadata to a list of tuples
+ metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+ result = await response
+ if isinstance(result, proto.Message):
+ {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2293): Investigate if we can improve this logic
+ or wait for next gen protobuf.
+ #}
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response to rpc {client_call_details.method}.",
+ extra = {
+ "serviceName": "{{ service.meta.address.proto }}",
+ "rpcName": str(client_call_details.method),
+ "response": grpc_response,
+ {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up. We're including it within "request" for consistency with REST transport. #}
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+

class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport):
 """gRPC AsyncIO backend transport for {{ service.name }}.

@@ -242,8 +323,11 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport):
 ],
 )

- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientAIOInterceptor()
+ self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+ self._logged_channel = self._grpc_channel
 self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+ # Wrap messages. This must be done after self._logged_channel exists
 self._prep_wrapped_messages(client_info)

 @property
@@ -267,7 +351,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport):
 # Quick check: Only create a new client if we do not already have one.
 if self._operations_client is None:
 self._operations_client = operations_v1.OperationsAsyncClient(
- self.grpc_channel
+ self._logged_channel
 )

 # Return the client from cache.
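For orientation before the remaining hunks: the interceptor added above is ordinary grpc.aio machinery. It is appended to the channel's unary-unary interceptor list, the channel is re-exposed as self._logged_channel, and every stub below is then created from that logged channel. A minimal standalone sketch of the same pattern follows, with illustrative names and attachment via the public interceptors= argument rather than the private list the template uses:

import logging

import grpc

_LOGGER = logging.getLogger(__name__)


class LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):
    # Log the outgoing request and the returned response, but only when
    # DEBUG logging is enabled, so the interceptor costs nothing otherwise.
    async def intercept_unary_unary(self, continuation, client_call_details, request):
        logging_enabled = _LOGGER.isEnabledFor(logging.DEBUG)
        if logging_enabled:
            _LOGGER.debug("Sending request for %s", client_call_details.method)
        call = await continuation(client_call_details, request)
        if logging_enabled:
            # Awaiting the call object yields the response message; returning
            # the call keeps it awaitable for the caller, as the template does.
            response = await call
            _LOGGER.debug(
                "Received response to rpc %s: %s",
                client_call_details.method,
                type(response).__name__,
            )
        return call


# Usage sketch: channel = grpc.aio.insecure_channel(
#     "localhost:50051", interceptors=[LoggingClientAIOInterceptor()])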
@@ -297,7 +381,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if '{{ method.transport_safe_name|snake_case }}' not in self._stubs: - self._stubs['{{ method.transport_safe_name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( + self._stubs['{{ method.transport_safe_name|snake_case }}'] = self._logged_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', request_serializer={{ method.input.ident }}.{% if method.input.ident.python_import.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, @@ -325,7 +409,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -351,7 +435,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -380,7 +464,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, @@ -388,12 +472,12 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): return self._stubs["test_iam_permissions"] {% endif %} - {{ shared_macros.prep_wrapped_messages_async_method(service)|indent(4) }} + {{ shared_macros.prep_wrapped_messages_async_method(api, service)|indent(4) }} {{ shared_macros.wrap_async_method_macro()|indent(4) }} def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -405,4 +489,4 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): __all__ = ( '{{ service.name }}GrpcAsyncIOTransport', ) -{% endblock %} +{% endblock %} \ No newline at end of file diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index d7864e4b12..63adba1920 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -3,9 +3,10 @@ {% block content %} +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -40,93 +41,22 @@ try: except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) -class {{ service.name }}RestInterceptor: - """Interceptor for {{ service.name }}. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the {{ service.name }}RestTransport. - - .. 
code-block:: python - class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): - {% for _, method in service.methods|dictsort if not method.client_streaming %} - def pre_{{ method.name|snake_case }}(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - {% if not method.void %} - def post_{{ method.name|snake_case }}(self, response): - logging.log(f"Received response: {response}") - return response - {% endif %} - -{% endfor %} - transport = {{ service.name }}RestTransport(interceptor=MyCustom{{ service.name }}Interceptor()) - client = {{ service.client_name }}(transport=transport) - - - """ - {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming and method.http_options %} - def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for {{ method.name|snake_case }} - - Override in a subclass to manipulate the request or metadata - before they are sent to the {{ service.name }} server. - """ - return request, metadata - - {% if not method.void %} - {% if not method.server_streaming %} - def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}: - {% else %} - def post_{{ method.name|snake_case }}(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - {% endif %} - """Post-rpc interceptor for {{ method.name|snake_case }} - - Override in a subclass to manipulate the response - after it is returned by the {{ service.name }} server but before - it is returned to user code. - """ - return response - {% endif %} - {% endfor %} - - {% for name, signature in api.mixin_api_signatures.items() %} - def pre_{{ name|snake_case }}( - self, request: {{signature.request_type}}, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[{{signature.request_type}}, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for {{ name|snake_case }} - - Override in a subclass to manipulate the request or metadata - before they are sent to the {{ service.name }} server. - """ - return request, metadata - - def post_{{ name|snake_case }}( - self, response: {{signature.response_type}} - ) -> {{signature.response_type}}: - """Post-rpc interceptor for {{ name|snake_case }} - - Override in a subclass to manipulate the response - after it is returned by the {{ service.name }} server but before - it is returned to user code. 
- """ - return response - {% endfor %} +{{ shared_macros.create_interceptor_class(api, service, method, is_async=False) }} @dataclasses.dataclass @@ -274,14 +204,14 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {% if method.http_options and not method.client_streaming %} {% set body_spec = method.http_options[0].body %} - {{ shared_macros.response_method(body_spec)|indent(8) }} + {{ shared_macros.response_method(body_spec, is_async=False, is_streaming_method=method.server_streaming)|indent(8) }} {% endif %}{# method.http_options and not method.client_streaming #} def __call__(self, request: {{method.input.ident}}, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + {{ shared_macros.client_method_metadata_argument()|indent(8) }}={{ shared_macros.client_method_metadata_default_value() }}, ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming.ResponseIterator{% endif %}{% endif %}: {% if method.http_options and not method.client_streaming %} r"""Call the {{- ' ' -}} @@ -296,8 +226,7 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(8) }} {% if not method.void %} Returns: @@ -306,7 +235,7 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {% endif %} """ - {{ shared_macros.rest_call_method_common(body_spec, method.name, service.name)|indent(8) }} + {{ shared_macros.rest_call_method_common(body_spec, method.name, service, False, method.output.ident.is_proto_plus_type)|indent(8) }} {% if not method.void %} # Return the response @@ -325,7 +254,35 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) {% endif %}{# method.lro #} + {#- TODO(https://github.com/googleapis/gapic-generator-python/issues/2274): Add debug log before intercepting a request #} resp = self._interceptor.post_{{ method.name|snake_case }}(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_{{ method.name|snake_case }}_with_metadata(resp, response_metadata) + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2279): Add logging support for rest streaming. #} + {% if not method.server_streaming %} + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json(response){% else %}json_format.MessageToJson(resp){% endif %} + + except: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2283): Remove try/except once unit tests are updated. 
#} + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for {{ service.meta.address.proto_package_versioned }}.{{ service.client_name }}.{{ method.transport_safe_name|snake_case }}", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": "{{ method.name }}", + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up #} + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + {% endif %}{# if not method.server_streaming #} return resp {% endif %}{# method.void #} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index ab87c32c76..79d54e4f2a 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -1,24 +1,93 @@ +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove the following variable (and the condition later in this file) for async rest transport once support for it is GA. #} {% set rest_async_io_enabled = api.all_library_settings[api.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled %} {% extends '_base.py.j2' %} {% block content %} + +import google.auth +try: + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore + from google.api_core import rest_streaming_async # type: ignore + from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore +{# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2200): Add coverage for ImportError. #} +except ImportError as e: # pragma: NO COVER + raise ImportError("`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. 
Install the library with the `async_rest` extra using `pip install {{ api.naming.warehouse_package_name }}[async_rest]`") from e + +from google.auth.aio import credentials as ga_credentials_async # type: ignore + +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 +{% if service.has_lro %} +from google.api_core import operations_v1 +{% endif %}{# service.has_lro #} +{% if opts.add_iam_methods or api.has_iam_mixin %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} +from google.api_core import retry_async as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming_async # type: ignore + + +from google.protobuf import json_format +{% if service.has_lro %} +from google.api_core import operations_v1 +{% endif %} +{% if opts.add_iam_methods or api.has_iam_mixin %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} -from typing import Any, Optional +import json # type: ignore +import dataclasses +from typing import Any, Dict, List, Callable, Tuple, Optional, Sequence, Union + +{{ shared_macros.operations_mixin_imports(api, service, opts) }} from .rest_base import _Base{{ service.name }}RestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -{# TODO (https://github.com/googleapis/gapic-generator-python/issues/2128): Update `rest_version` to include the transport dependency version. #} + +import logging + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=None, + rest_version=f"google-auth@{google.auth.__version__}", ) +{{ shared_macros.create_interceptor_class(api, service, method, is_async=True) }} + +@dataclasses.dataclass +class Async{{service.name}}RestStub: + _session: AsyncAuthorizedSession + _host: str + _interceptor: Async{{service.name}}RestInterceptor + class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): """Asynchronous REST backend transport for {{ service.name }}. @@ -30,12 +99,13 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, + def __init__(self, + *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - {# TODO (https://github.com/googleapis/gapic-generator-python/issues/2129): Update the default type for credentials. 
#} - credentials: Optional[Any] = None, + credentials: Optional[ga_credentials_async.Credentials] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, url_scheme: str = 'https', + interceptor: Optional[Async{{ service.name }}RestInterceptor] = None, ) -> None: """Instantiate the transport. @@ -48,8 +118,7 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. - {# TODO (https://github.com/googleapis/gapic-generator-python/issues/2129): Update the default type for credentials. #} - credentials (Optional[Any]): The + credentials (Optional[google.auth.aio.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the @@ -72,9 +141,186 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): url_scheme=url_scheme, api_audience=None ) + {# Note: Type for creds is ignored because of incorrect type hint for creds in the client layer. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2177): Remove `# type: ignore` once + # we update the type hints for credentials to include asynchronous credentials in the client layer. + #} + self._session = AsyncAuthorizedSession(self._credentials) # type: ignore + self._interceptor = interceptor or Async{{ service.name }}RestInterceptor() + self._wrap_with_kind = True + self._prep_wrapped_messages(client_info) + {% if service.has_lro %} + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = None + {% endif %} + + + {{ shared_macros.prep_wrapped_messages_async_method(api, service)|indent(4) }} + + {{ shared_macros.wrap_async_method_macro()|indent(4) }} + + {% for method in service.methods.values()|sort(attribute="name") %} + class _{{method.name}}(_Base{{ service.name }}RestTransport._Base{{method.name}}, Async{{service.name}}RestStub): + def __hash__(self): + return hash("Async{{service.name}}RestTransport.{{method.name}}") + + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Implement client streaming method. #} + {% if method.http_options and not method.client_streaming %} + {% set body_spec = method.http_options[0].body %} + {{ shared_macros.response_method(body_spec, is_async=True, is_streaming_method=None)|indent(8) }} + + {% endif %}{# method.http_options and not method.client_streaming and not method.paged_result_field #} + async def __call__(self, + request: {{method.input.ident}}, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + {{ shared_macros.client_method_metadata_argument()|indent(12) }}={{ shared_macros.client_method_metadata_default_value() }}, + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Update return type for client streaming method. #} + ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming_async.AsyncResponseIterator{% endif %}{% endif %}: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Implement client streaming method. 
#} + {% if method.http_options and not method.client_streaming %} + r"""Call the {{- ' ' -}} + {{ (method.name|snake_case).replace('_',' ')|wrap( + width=70, offset=45, indent=8) }} + {{- ' ' -}} method over HTTP. + + Args: + request (~.{{ method.input.ident }}): + The request object.{{ ' ' }} + {{- method.input.meta.doc|rst(width=72, indent=16, nl=False) }} + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + {{ shared_macros.client_method_metadata_argument_doc()|indent(8) }} + {% if not method.void %} + + Returns: + ~.{{ method.output.ident }}: + {{ method.output.meta.doc|rst(width=72, indent=16) }} + {% endif %} + """ + + {{ shared_macros.rest_call_method_common(body_spec, method.name, service, True, method.output.ident.is_proto_plus_type)|indent(8) }} + + {% if not method.void %} + # Return the response + {% if method.server_streaming %} + resp = rest_streaming_async.AsyncResponseIterator(response, {{method.output.ident}}) + {% else %} + resp = {{method.output.ident}}() + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2189): Investigate if the proto-plus conversion below is needed for a streaming response. #} + {% if method.output.ident.is_proto_plus_type %} + pb_resp = {{method.output.ident}}.pb(resp) + {% else %} + pb_resp = resp + {% endif %}{# if method.output.ident.is_proto_plus_type #} + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + {% endif %}{# if method.server_streaming #} + resp = await self._interceptor.post_{{ method.name|snake_case }}(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_{{ method.name|snake_case }}_with_metadata(resp, response_metadata) + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2279): Add logging support for rest streaming. #} + {% if not method.server_streaming %} + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json(response){% else %}json_format.MessageToJson(resp){% endif %} + + except: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2283): Remove try/except once unit tests are updated. #} + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for {{ service.meta.address.proto_package_versioned }}.{{ service.async_client_name }}.{{ method.transport_safe_name|snake_case }}", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": "{{ method.name }}", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + {% endif %}{# if not method.server_streaming #} + return resp + + {% endif %}{# method.void #} + + {% else %} + raise NotImplementedError( + "Method {{ method.name }} is not available over REST transport" + ) + {% endif %}{# method.http_options and not method.client_streaming #} + + {% endfor %} + + {% if service.has_lro %} + + @property + def operations_client(self) -> AsyncOperationsRestClient: + """Create the async client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + {% for selector, rules in api.http_options.items() %} + {% if selector.startswith('google.longrunning.Operations') %} + '{{ selector }}': [ + {% for rule in rules %} + { + 'method': '{{ rule.method }}', + 'uri': '{{ rule.uri }}', + {% if rule.body %} + 'body': '{{ rule.body }}', + {% endif %}{# rule.body #} + }, + {% endfor %}{# rules #} + ], + {% endif %}{# selector.startswith Operations #} + {% endfor %}{# http_options #} + } + + rest_transport = operations_v1.AsyncOperationsRestTransport( # type: ignore + host=self._host, + # use the credentials which are saved + {# Note: Type for creds is ignored because of incorrect type hint for creds in the client layer. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2177): Remove `# type: ignore` once + # we update the type hints for credentials to include asynchronous credentials in the client layer. + #} + credentials=self._credentials, # type: ignore + http_options=http_options, + path_prefix="{{ service.client_package_version }}" + ) + + self._operations_client = AsyncOperationsRestClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + {% endif %} + + {% for method in service.methods.values()|sort(attribute="name") %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2154): Remove `type: ignore`. #} + @property + def {{method.transport_safe_name|snake_case}}(self) -> Callable[ + [{{method.input.ident}}], + {{method.output.ident}}]: + return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore + + {% endfor %} + {% for name, sig in api.mixin_api_signatures.items() %} + {{ shared_macros.generate_mixin_call_method(service, api, name, sig, is_async=True) | indent(4) }} + {% endfor %} @property def kind(self) -> str: return "rest_asyncio" + async def close(self): + await self._session.close() + {% endblock %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 index 9e4f26fcb3..b79785afc5 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 @@ -31,7 +31,6 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO -from google.auth import credentials as ga_credentials # type: ignore import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -56,7 +55,7 @@ class _Base{{ service.name }}RestTransport({{service.name}}Transport): {# TODO: handle mtls stuff if that is relevant for rest transport #} def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: Optional[ga_credentials.Credentials] = None, + credentials: Optional[Any] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, url_scheme: str = 'https', @@ -66,7 +65,12 @@ class _Base{{ service.name }}RestTransport({{service.name}}Transport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): {{ ' ' }}The hostname to connect 
to {% if service.host %}(default: '{{ service.host }}'){% endif %}. - credentials (Optional[google.auth.credentials.Credentials]): The + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2173): Type hint for credentials is + # set to `Any` to support async and sync credential types in the parent rest transport classes. + # However, we should have a stronger type here such as an abstract base credentials + # class leveraged by sync and async credential classes. + #} + credentials (Optional[Any]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the diff --git a/gapic/templates/_pypi_packages.j2 b/gapic/templates/_pypi_packages.j2 index 0136654f16..de43f16f5b 100644 --- a/gapic/templates/_pypi_packages.j2 +++ b/gapic/templates/_pypi_packages.j2 @@ -11,8 +11,8 @@ Note: Set the minimum version for google-cloud-documentai to 2.0.0 which has sup ("google", "apps", "script", "type"): {"package_name": "google-apps-script-type", "lower_bound": "0.2.0", "upper_bound": "1.0.0dev"}, ("google", "geo", "type"): {"package_name": "google-geo-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, ("google", "identity", "accesscontextmanager", "v1"): {"package_name": "google-cloud-access-context-manager", "lower_bound": "0.1.2", "upper_bound": "1.0.0dev"}, - ("google", "cloud", "documentai", "v1"): {"package_name": "google-cloud-documentai", "lower_bound": "2.0.0", "upper_bound": "3.0.0dev"}, - ("google", "cloud", "kms", "v1"): {"package_name": "google-cloud-kms", "lower_bound": "2.3.0", "upper_bound": "3.0.0dev"}, + ("google", "cloud", "documentai", "v1"): {"package_name": "google-cloud-documentai", "lower_bound": "2.0.0", "upper_bound": "4.0.0dev"}, + ("google", "cloud", "kms", "v1"): {"package_name": "google-cloud-kms", "lower_bound": "2.3.0", "upper_bound": "4.0.0dev"}, ("google", "cloud", "osconfig", "v1"): {"package_name": "google-cloud-os-config", "lower_bound": "1.0.0", "upper_bound": "2.0.0dev"}, ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.12.4", "upper_bound": "1.0.0dev"}, ("google", "iam", "v2"): {"package_name": "google-cloud-iam", "lower_bound": "2.12.2", "upper_bound": "3.0.0dev"}, diff --git a/gapic/templates/docs/index.rst.j2 b/gapic/templates/docs/index.rst.j2 index c8dea9adbc..890509be79 100644 --- a/gapic/templates/docs/index.rst.j2 +++ b/gapic/templates/docs/index.rst.j2 @@ -3,5 +3,5 @@ API Reference .. 
toctree::
 :maxdepth: 2

- {{ api.naming.versioned_module_name }}/services
- {{ api.naming.versioned_module_name }}/types
+ {{ api.naming.versioned_module_name }}/services_
+ {{ api.naming.versioned_module_name }}/types_
diff --git a/gapic/templates/noxfile.py.j2 b/gapic/templates/noxfile.py.j2
index 452c706135..18505d5434 100644
--- a/gapic/templates/noxfile.py.j2
+++ b/gapic/templates/noxfile.py.j2
@@ -18,7 +18,8 @@ ALL_PYTHON = [
 "3.9",
 "3.10",
 "3.11",
- "3.12"
+ "3.12",
+ "3.13",
 ]

 CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
@@ -28,7 +29,7 @@ PACKAGE_NAME = '{{ api.naming.warehouse_package_name }}'

 BLACK_VERSION = "black==22.3.0"
 BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.12"
+DEFAULT_PYTHON_VERSION = "3.13"

 nox.sessions = [
 "unit",
@@ -50,7 +51,7 @@ nox.sessions = [
 def unit(session, protobuf_implementation):
 """Run the unit test suite."""

- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"):
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
 session.skip("cpp implementation is not supported in python 3.11+")

 session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
@@ -74,6 +75,7 @@ def unit(session, protobuf_implementation):
 "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
 },
 )
+{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2201) Add a `unit_rest_async` nox session to run tests with [async_rest] extra installed. #}

 @nox.session(python=ALL_PYTHON[-1])
 @nox.parametrize(
@@ -83,7 +85,7 @@ def unit(session, protobuf_implementation):
 def prerelease_deps(session, protobuf_implementation):
 """Run the unit test suite against pre-release versions of dependencies."""

- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"):
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
 session.skip("cpp implementation is not supported in python 3.11+")

 # Install test environment dependencies
@@ -118,7 +120,8 @@ def prerelease_deps(session, protobuf_implementation):
 "googleapis-common-protos",
 "google-api-core",
 "google-auth",
- "grpcio",
+ # Exclude grpcio==1.67.0rc1, which does not support Python 3.13
+ "grpcio!=1.67.0rc1",
 "grpcio-status",
 "protobuf",
 "proto-plus",
diff --git a/gapic/templates/setup.py.j2 b/gapic/templates/setup.py.j2
index 48427184f9..7ebab02cbf 100644
--- a/gapic/templates/setup.py.j2
+++ b/gapic/templates/setup.py.j2
@@ -1,3 +1,5 @@
+{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove the following variable (and the condition later in this file) for async rest transport once support for it is GA.
#} +{% set rest_async_io_enabled = api.all_library_settings[api.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled %} {% extends '_base.py.j2' %} {% from '_pypi_packages.j2' import pypi_packages %} {% block content %} @@ -35,6 +37,7 @@ dependencies = [ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", {% for package_tuple, package_info in pypi_packages.items() %} @@ -46,6 +49,14 @@ dependencies = [ {% endif %} {% endfor %} ] +extras = { +{% if rest_async_io_enabled %} + "async_rest": [ + "google-api-core[grpc] >= 2.21.0, < 3.0.0dev", + "google-auth[aiohttp] >= 2.35.0, <3.0.0dev" + ], +{% endif %} +} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/{{ api.naming.warehouse_package_name }}" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -81,6 +92,7 @@ setuptools.setup( "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -88,6 +100,7 @@ setuptools.setup( packages=packages, python_requires=">=3.7", install_requires=dependencies, + extras_require=extras, include_package_data=True, zip_safe=False, ) diff --git a/gapic/templates/testing/constraints-3.13.txt.j2 b/gapic/templates/testing/constraints-3.13.txt.j2 new file mode 100644 index 0000000000..615c99518a --- /dev/null +++ b/gapic/templates/testing/constraints-3.13.txt.j2 @@ -0,0 +1,4 @@ +# -*- coding: utf-8 -*- +{% block constraints %} +{% include "testing/_default_constraints.j2" %} +{% endblock %} diff --git a/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index 33cbd8117d..169807a961 100644 --- a/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -1,67 +1,3 @@ -{% if 'rest' in opts.transport %} -{% for name, sig in api.mixin_api_signatures.items() %} - -def test_{{ name|snake_case }}_rest_bad_request(transport: str = 'rest', request_type={{ sig.request_type }}): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({{ api.mixin_http_options["{}".format(name)][0].sample_request }}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.{{ name|snake_case }}(request) - -@pytest.mark.parametrize("request_type", [ - {{ sig.request_type }}, - dict, -]) -def test_{{ name|snake_case }}_rest(request_type): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {{ api.mixin_http_options["{}".format(name)][0].sample_request }} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - {% if sig.response_type == "None" %} - return_value = None - {% else %} - return_value = {{ sig.response_type }}() - {% endif %} - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - {% if sig.response_type == "None" %} - json_return_value = '{}' - {% else %} - json_return_value = json_format.MessageToJson(return_value) - {% endif %} - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.{{ name|snake_case }}(request) - - # Establish that the response is the type that we expect. - {% if sig.response_type == "None" %} - assert response is None - {% else %} - assert isinstance(response, {{ sig.response_type }}) - {% endif %} -{% endfor %} -{% endif %} - {% if api.has_operations_mixin and ('grpc' in opts.transport or 'grpc_asyncio' in opts.transport) %} {% if "DeleteOperation" in api.mixin_api_methods %} diff --git a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index c77d28fef5..9507357fb9 100644 --- a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -20,7 +20,7 @@ except ImportError: # pragma: NO COVER import grpc from grpc.experimental import aio {% if "rest" in opts.transport %} -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format {% endif %} import json @@ -30,6 +30,16 @@ from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers {% if 'rest' in opts.transport %} +{% if rest_async_io_enabled %} +try: + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession + from google.api_core.operations_v1 import AsyncOperationsRestClient + HAS_ASYNC_REST_EXTRA = True +{# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} +except ImportError: # pragma: NO COVER + HAS_ASYNC_REST_EXTRA = False +{% endif %}{# if rest_async_io_enabled #} from requests import Response from requests import Request, PreparedRequest from requests.sessions import Session @@ -39,6 +49,7 @@ from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async HAS_GOOGLE_AUTH_AIO = True +{# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. 
#} except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False @@ -91,6 +102,7 @@ from google.iam.v1 import policy_pb2 # type: ignore {% endfilter %} {{ shared_macros.add_google_api_core_version_header_import(service.version) }} + CRED_INFO_JSON = { "credential_source": "/path/to/file", "credential_type": "service account credentials", @@ -98,6 +110,13 @@ CRED_INFO_JSON = { } CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -362,7 +381,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name with pytest.raises(ValueError): client._compare_universes("foo.bar", None) - @pytest.mark.parametrize("client_class,transport_name", [ {% if 'grpc' in opts.transport %} ({{ service.client_name }}, "grpc"), @@ -975,7 +993,7 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl {% endfor -%} {#- method in methods for rest #} {% for method in service.methods.values() if 'rest' in opts.transport and - not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} + not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), @@ -1090,16 +1108,26 @@ def test_transport_adc(transport_class): {% set configs = [] %} {% for transport in opts.transport %} - {% do configs.append({'service':service, 'transport':transport, 'is_async':false}) %} + {% do configs.append({'service':service, 'api':api, 'transport':transport, 'is_async':false}) %} {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} {% if 'grpc' in transport or rest_async_io_enabled %} - {% do configs.append({'service':service, 'transport':transport, 'is_async':true}) %} + {% do configs.append({'service':service, 'api':api, 'transport':transport, 'is_async':true}) %} {% endif %} {% endfor %} {% for conf in configs %} {{ test_macros.transport_kind_test(**conf) }} -{% endfor %} +{{ test_macros.run_transport_tests_for_config(**conf) }} +{{ test_macros.empty_call_test(**conf) }} +{{ test_macros.routing_parameter_test(**conf) }} +{% if service.has_lro %} +{{ test_macros.lro_client_test(**conf) }} +{% endif %}{# service.has_lro #} +{% endfor %} +{% if rest_async_io_enabled %} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove the below macro when async rest is GA and supports all required parameters. #} +{{ test_macros.async_rest_unsupported_params_test(service) }} +{% endif %} {% if 'grpc' in opts.transport %} def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
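A reading aid for the configs block above: it builds one sync entry per configured transport, plus an async entry for gRPC transports and, once rest_async_io_enabled is set, for REST as well. In plain Python the same cross product would look roughly like this (a sketch of the template logic, not generated output; the real entries also carry service and api):

def build_test_configs(transports, rest_async_io_enabled):
    # Mirrors the Jinja `configs` loop: every transport gets a sync config;
    # grpc transports (and rest, behind the experimental flag) also get an
    # async config.
    configs = []
    for transport in transports:
        configs.append({"transport": transport, "is_async": False})
        if "grpc" in transport or rest_async_io_enabled:
            configs.append({"transport": transport, "is_async": True})
    return configs


assert build_test_configs(["grpc", "rest"], rest_async_io_enabled=False) == [
    {"transport": "grpc", "is_async": False},
    {"transport": "grpc", "is_async": True},
    {"transport": "rest", "is_async": False},
]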
@@ -1403,26 +1431,6 @@ def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtl ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - -{% if service.has_lro -%} -def test_{{ service.name|snake_case }}_rest_lro_client(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client -{%- endif %} - - {% endif %} {# rest #} @pytest.mark.parametrize("transport_name", [ @@ -1713,40 +1721,11 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) -{% if 'grpc' in opts.transport %} -@pytest.mark.asyncio -async def test_transport_close_async(): - client = {{ service.async_client_name }}( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() -{% endif %} - {% include 'tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2' %} -def test_transport_close(): - transports = { - {% if 'rest' in opts.transport %} - "rest": "_session", - {% endif %} - {% if 'grpc' in opts.transport %} - "grpc": "_grpc_channel", - {% endif %} - } - - for transport, close_name in transports.items(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() +{% for conf in configs %} +{{ test_macros.transport_close_test(**conf) }} +{% endfor %} def test_client_ctx(): transports = [ diff --git a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 2f2fb3b74f..33b76774c2 100644 --- a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1,6 +1,5 @@ {% macro grpc_required_tests(method, service, api, full_extended_lro=False) %} {% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.safe_name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %} -{% with uuid4_re = "[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}" %} @pytest.mark.parametrize("request_type", [ {{ method.input.ident }}, dict, @@ -123,9 +122,6 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): {% if not method.client_streaming %} -{{ empty_call_test(method, method_name, service, api, uuid4_re)}} - - def test_{{ method_name }}_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
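An aside on the UUID4 regex involved in these hunks: the inline uuid4_re binding deleted above is being replaced by a shared get_uuid4_re() helper (used in the next hunk), and the pattern accepts UUID4 strings with or without hyphens. A quick self-contained check, with the regex copied from the deleted line:

import re
import uuid

# Pattern from the removed `uuid4_re` binding: the version nibble is pinned
# to 4, the variant nibble to [89ab], and every hyphen is optional.
UUID4_RE = r"[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}"

assert re.match(UUID4_RE, str(uuid.uuid4()))  # hyphenated form
assert re.match(UUID4_RE, uuid.uuid4().hex)   # hyphen-less form also matches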
@@ -155,7 +151,7 @@ def test_{{ method_name }}_non_empty_request_with_auto_populated_field(): {% if method_settings is not none %} {% for auto_populated_field in method_settings.auto_populated_fields %} # Ensure that the uuid4 field is set according to AIP 4235 - assert re.match(r"{{ uuid4_re }}", args[0].{{ auto_populated_field }}) + assert re.match(r"{{ get_uuid4_re() }}", args[0].{{ auto_populated_field }}) # clear UUID field so that the check below succeeds args[0].{{ auto_populated_field }} = None {% endfor %} @@ -218,10 +214,6 @@ def test_{{ method_name }}_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 {% if not full_extended_lro %} -{% if not method.client_streaming %} -{{ empty_call_test(method, method_name, service, api, uuid4_re, is_async=True) }} -{% endif %} - @pytest.mark.asyncio async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -246,7 +238,7 @@ async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = " {% if method.client_streaming %} request = [{}] - await client.{{ method.name|snake_case }}(request) + await client.{{ method.safe_name|snake_case }}(request) {% else %} request = {} await client.{{ method_name }}(request) @@ -263,7 +255,7 @@ async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = " {% endif %} {% if method.client_streaming %} - await client.{{ method.name|snake_case }}(request) + await client.{{ method.safe_name|snake_case }}(request) {% else %} await client.{{ method_name }}(request) {% endif %} @@ -329,9 +321,9 @@ async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_ )) {% endif %} {% if method.client_streaming and method.server_streaming %} - response = await client.{{ method.name|snake_case }}(iter(requests)) + response = await client.{{ method.safe_name|snake_case }}(iter(requests)) {% elif method.client_streaming and not method.server_streaming %} - response = await (await client.{{ method.name|snake_case }}(iter(requests))) + response = await (await client.{{ method.safe_name|snake_case }}(iter(requests))) {% else %} response = await client.{{ method_name }}(request) {% endif %} @@ -394,44 +386,6 @@ async def test_{{ method_name }}_async_from_dict(): await test_{{ method_name }}_async(request_type=dict) {% endif %}{# full_extended_lro #} -{% if method.explicit_routing %} -def test_{{ method.name|snake_case }}_routing_parameters(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - {% for routing_param in method.routing_rule.routing_parameters %} - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = {{ method.input.ident }}(**{{ routing_param.sample_request }}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - {% if method.void %} - call.return_value = None - {% elif method.lro %} - call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming %} - call.return_value = iter([{{ method.output.ident }}()]) - {% else %} - call.return_value = {{ method.output.ident }}() - {% endif %} - client.{{ method.safe_name|snake_case }}(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - {% endfor %} -{% endif %} - - {% if method.field_headers and not method.client_streaming and not method.explicit_routing %} def test_{{ method_name }}_field_headers(): client = {{ service.client_name }}( @@ -1044,211 +998,12 @@ def test_{{ method_name }}_raw_page_lro(): response = {{ method.lro.response_type.ident }}() assert response.raw_page is response {% endif %}{# method.paged_result_field #}{% endwith %} -{% endwith %}{# uuid4_re #} {% endmacro %} {% macro rest_required_tests(method, service, numeric_enums=False, full_extended_lro=False) %} -{% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %}{% if method.http_options %} +{% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.safe_name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %}{% if method.http_options %} {# TODO(kbandes): remove this if condition when lro and client streaming are supported. #} {% if not method.client_streaming %} -@pytest.mark.parametrize("request_type", [ - {{ method.input.ident }}, - dict, -]) -def test_{{ method_name }}_rest(request_type): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {{ method.http_options[0].sample_request(method) }} - {% for field in method.body_fields.values() %} - {% if not field.oneof or field.proto3_optional %} - {# ignore oneof fields that might conflict with sample_request #} - request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = {{ method.input.ident }}.meta.fields["{{ field.name }}"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["{{ field.name }}"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["{{ field.name }}"][field])): - del request_init["{{ field.name }}"][field][i][subfield] - else: - del request_init["{{ field.name }}"][field][subfield] - {% endif %} - {% endfor %} - request = request_type(**request_init) - {% if method.client_streaming %} - requests = [request] - {% endif %} - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- {% if method.void %} - return_value = None - {% elif method.lro %} - return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.extended_lro %} - return_value = {{ method.extended_lro.operation_type.ident }}( - {% for field in method.extended_lro.operation_type.fields.values() | rejectattr('message')%} - {% if not field.oneof or field.proto3_optional %} - {{ field.name }}={{ field.mock_value }}, - {% endif %}{% endfor %} - {# This is a hack to only pick one field #} - {% for oneof_fields in method.output.oneof_fields().values() %} - {% if (oneof_fields | rejectattr('message') | list) %} - {% with field = (oneof_fields | rejectattr('message') | first) %} - {{ field.name }}={{ field.mock_value }}, - {% endwith %} - {% endif %} - {% endfor %} - ) - {% else %} - return_value = {{ method.output.ident }}( - {% for field in method.output.fields.values() | rejectattr('message')%} - {% if not field.oneof or field.proto3_optional %} - {{ field.name }}={{ field.mock_value }}, - {% endif %}{% endfor %} - {# This is a hack to only pick one field #} - {% for oneof_fields in method.output.oneof_fields().values() %} - {% if (oneof_fields | rejectattr('message') | list) %} - {% with field = (oneof_fields | rejectattr('message') | first) %} - {{ field.name }}={{ field.mock_value }}, - {% endwith %} - {% endif %} - {% endfor %} - ) - {% endif %} - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - {% if method.void %} - json_return_value = '' - {% elif method.lro %} - json_return_value = json_format.MessageToJson(return_value) - {% else %} - {% if method.output.ident.is_proto_plus_type %} - # Convert return value to protobuf type - return_value = {{ method.output.ident }}.pb(return_value) - {% endif %} - json_return_value = json_format.MessageToJson(return_value) - {% endif %} - - {% if method.server_streaming %} - json_return_value = "[{}]".format(json_return_value) - {% endif %} - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - {% if method.client_streaming %} - response = client.{{ method_name }}(iter(requests)) - {% elif method.server_streaming %} - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.{{ method_name }}(request) - {% else %} - response = client.{{ method_name }}(request) - {% endif %} - - {% if "next_page_token" in method_output.fields.values()|map(attribute='name', default="") and not method.paged_result_field %} - {# Cheeser assertion to force code coverage for bad paginated methods #} - assert response.raw_page is response - - {% endif %} - - {% if method.server_streaming %} - assert isinstance(response, Iterable) - response = next(response) - {% endif %} - - # Establish that the response is the type that we expect. 
- {% if method.void %} - assert response is None - {% elif method.lro %} - assert response.operation.name == "operations/spam" - {% elif method.extended_lro and not full_extended_lro %} - assert isinstance(response, {{ method.extended_lro.operation_type.ident }}) - {% else %} - assert isinstance(response, {{ method.client_output.ident }}) - {% for field in method_output.fields.values() | rejectattr('message') %} - {% if not field.oneof or field.proto3_optional %} - {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} - {% if field.repeated %} - for index in range(len(response.{{ field.name }})): - assert math.isclose( - response.{{ field.name }}[index], - {{ field.mock_value }}[index], - rel_tol=1e-6, - ) - {% else %}{# field.repeated #} - assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) - {% endif %}{# field.repeated #} - {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} - assert response.{{ field.name }} is {{ field.mock_value }} - {% else %} - assert response.{{ field.name }} == {{ field.mock_value }} - {% endif %} - {% endif %}{# end oneof/optional #} - {% endfor %} - {% endif %} - def test_{{ method_name }}_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1416,6 +1171,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if method.client_streaming %} response = client.{{ method_name }}(iter(requests)) @@ -1457,104 +1213,12 @@ def test_{{ method_name }}_rest_unset_required_fields(): {% endif %}{# required_fields #} -{% if not method.client_streaming %} -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_{{ method_name }}_rest_interceptors(null_interceptor): - transport = transports.{{ service.name }}RestTransport( +{% if method.flattened_fields and not method.client_streaming %} +def test_{{ method_name }}_rest_flattened(): + client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.{{ service.name}}RestInterceptor(), - ) - client = {{ service.client_name }}(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - {% if method.lro %} - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - {% endif %} - {% if not method.void %} - mock.patch.object(transports.{{ service.name }}RestInterceptor, "post_{{method.name|snake_case}}") as post, \ - {% endif %} - mock.patch.object(transports.{{ service.name }}RestInterceptor, "pre_{{ method.name|snake_case }}") as pre: - pre.assert_not_called() - {% if not method.void %} - post.assert_not_called() - {% endif %} - {% if method.input.ident.is_proto_plus_type %} - pb_message = {{ method.input.ident }}.pb({{ method.input.ident }}()) - {% else %} - pb_message = {{ method.input.ident }}() - {% endif %} - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - {% if not method.void %} - req.return_value._content = {% if method.output.ident.is_proto_plus_type %}{{ 
method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} - - {% if method.server_streaming %} - req.return_value._content = "[{}]".format(req.return_value._content) - {% endif %} - - {% endif %} - - request = {{ method.input.ident }}() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - {% if not method.void %} - post.return_value = {{ method.output.ident }}() - {% endif %} - - client.{{ method_name }}(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - {% if not method.void %} - post.assert_called_once() - {% endif %} - -{% endif %}{# streaming #} - - -def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {{ method.http_options[0].sample_request(method) }} - request = request_type(**request_init) - {% if method.client_streaming %} - requests = [request] - {% endif %} - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - {% if method.client_streaming %} - client.{{ method_name }}(iter(requests)) - {% else %} - client.{{ method_name }}(request) - {% endif %} - - -{% if method.flattened_fields and not method.client_streaming %} -def test_{{ method_name }}_rest_flattened(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + transport="rest", + ) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: @@ -1600,6 +1264,7 @@ def test_{{ method_name }}_rest_flattened(): {% endif %} response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if method.server_streaming %} with mock.patch.object(response_value, 'iter_content') as iter_content: @@ -1756,25 +1421,7 @@ def test_{{ method_name }}_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -{%- else %}{# paged_result_field #} - -def test_{{ method_name }}_rest_error(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - {%- if not method.http_options %} - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. 
-    with pytest.raises(NotImplementedError) as not_implemented_error:
-        client.{{ method_name }}({})
-    assert (
-        "Method {{ method.name }} is not available over REST transport"
-        in str(not_implemented_error.value)
-    )
-
-    {%- endif %}{# not method.http_options #}
-{% endif %}{# flattened_fields #}
+{%- endif %}{# paged_result_field #}

{% else %}{# this is an lro or streaming method #}
def test_{{ method_name }}_rest_unimplemented():
@@ -1807,32 +1454,34 @@ def test_{{ method_name }}_rest_no_http_options():
{% endwith %}{# method_name #}
{% endmacro %}

-
-{% macro empty_call_test(method, method_name, service, api, uuid4_re, is_async=False) %}
+{#
+    This is a generic macro for testing method calls. Ideally this macro can be used to avoid duplication
+    in Jinja templates. If this macro cannot be customized for a specific method call test, consider
+    creating a new macro with the name `method_call_test_<test_name>` which supports
+    a more customized method call.
+#}
+{% macro method_call_test_generic(test_name, method, service, api, transport, request_dict, is_async=False, routing_param=None) %}
+{% set transport_name = get_transport_name(transport, is_async) %}
+{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %}
+{% set async_method_prefix = "async " if is_async else "" %}
{% if is_async %}
@pytest.mark.asyncio
-async def test_{{ method_name }}_empty_call_async():
-{% else %}
-def test_{{ method_name }}_empty_call():
-{% endif %}{# if is_async #}
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    {% if is_async %}
-    client = {{ service.async_client_name }}(
-        credentials=async_anonymous_credentials(),
-        transport='grpc_asyncio',
-    )
-    {% else %}
-    client = {{ service.client_name }}(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
+{% endif %}{# is_async #}
+{{ async_method_prefix }}def test_{{ method_name }}_{{ test_name }}_{{transport_name}}():
+    {% if transport_name == 'rest_asyncio' %}
+    if not HAS_ASYNC_REST_EXTRA:
+        pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+    {% endif %}
+    client = {{ get_client(service=service, is_async=is_async) }}(
+        credentials={{ get_credentials(is_async=is_async) }},
+        transport="{{ transport_name }}",
    )
-    {% endif %}{# if is_async #}

-    # Mock the actual call within the gRPC stub, and fake the request.
+    # Mock the actual call, and fake the request.
    with mock.patch.object(
            type(client.transport.{{ method.transport_safe_name|snake_case }}),
            '__call__') as call:
+        {% if 'rest' not in transport %}
        {% if is_async %}
        # Designate an appropriate return value for the call.
        {% if method.void %}
@@ -1857,24 +1506,49 @@ def test_{{ method_name }}_empty_call():
            {% endfor %}
        ))
        {% endif %}{# method.void #}
-        await client.{{ method_name }}()
        {% else %}{# if not is_async #}
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
- client.{{ method_name }}() + {% if method.void %} + call.return_value = None + {% elif method.lro %} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming %} + call.return_value = iter([{{ method.output.ident }}()]) + {% else %} + call.return_value = {{ method.output.ident }}() + {% endif %} {% endif %}{# is_async #} + {% endif %}{# if 'rest' not in transport #} + {% if is_async %} + await client.{{ method_name }}(request={{ request_dict }}) + {% else %}{# is_async #} + client.{{ method_name }}(request={{ request_dict }}) + {% endif %}{# is_async #} + + # Establish that the underlying stub method was called. call.assert_called() - _, args, _ = call.mock_calls[0] + _, args, {% if routing_param %}kw{% else %}_{% endif %} = call.mock_calls[0] {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} {% if method_settings is not none %} {% for auto_populated_field in method_settings.auto_populated_fields %} # Ensure that the uuid4 field is set according to AIP 4235 - assert re.match(r"{{ uuid4_re }}", args[0].{{ auto_populated_field }}) + assert re.match(r"{{ get_uuid4_re() }}", args[0].{{ auto_populated_field }}) # clear UUID field so that the check below succeeds args[0].{{ auto_populated_field }} = None {% endfor %}{# for auto_populated_field in method_settings.auto_populated_fields #} {% endif %}{# if method_settings is not none #} {% endwith %}{# method_settings #} - assert args[0] == {{ method.input.ident }}() + {% if request_dict %} + request_msg = {{ method.input.ident }}(**{{ request_dict }}) + {% else %} + request_msg = {{ method.input.ident }}() + {% endif %}{# request_dict #} + assert args[0] == request_msg + + {% if routing_param %} + expected_headers = {{ method.routing_rule.resolve(method.routing_rule, routing_param.sample_request) }} + assert gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw['metadata'] + {% endif %} +{% endwith %}{# method_name #} {% endmacro %} {% macro get_credentials(is_async=False) %} @@ -1889,12 +1563,719 @@ def test_{{ method_name }}_empty_call(): {{- transport + ("_asyncio" if is_async else "") -}} {% endmacro %} -{% macro transport_kind_test(service, transport, is_async) %} +{% macro transport_kind_test(service, api, transport, is_async) %} {% set transport_name = get_transport_name(transport, is_async) %} def test_transport_kind_{{ transport_name }}(): + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} transport = {{ get_client(service, is_async) }}.get_transport_class("{{ transport_name }}")( credentials={{get_credentials(is_async)}} ) assert transport.kind == "{{ transport_name }}" +{% endmacro %}{# transport_kind_test #} + +{% macro lro_client_test(service, api, transport, is_async) %} +{% if 'rest' in transport %} +{% set transport_name = get_transport_name(transport, is_async) %} +def test_{{ service.name|snake_case }}_{{ transport_name }}_lro_client(): + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + client = {{ get_client(service=service, is_async=is_async) }}( + credentials={{ get_credentials(is_async=is_async) }}, + transport="{{ transport_name }}", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. 
+ assert isinstance( + transport.operations_client, + {% if is_async %}operations_v1.AsyncOperationsRestClient{% else %}operations_v1.AbstractOperationsClient{% endif %}, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client +{% endif %}{# 'rest' in transport #} +{% endmacro %}{# lro_client_test #} + + +{% macro transport_close_test(service, api, transport, is_async) %} +{% set async_prefix = get_async_prefix(is_async) %} +{% set async_decorator = get_async_decorator(is_async) %} +{% set transport_name = get_transport_name(transport, is_async) %} +{% set close_session = { + 'rest': "_session", + 'grpc': "_grpc_channel"} +-%} +{{async_decorator}} +{{async_prefix}}def test_transport_close_{{transport_name}}(): + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + client = {{ get_client(service, is_async) }}( + credentials={{get_credentials(is_async)}}, + transport="{{transport_name}}" + ) + with mock.patch.object(type(getattr(client.transport, "{{close_session[transport]}}")), "close") as close: + {{async_prefix}}with client: + close.assert_not_called() + close.assert_called_once() + +{% endmacro %} + +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove / Update this test macro when async rest is GA. #} +{% macro async_rest_unsupported_params_test(service) %} +def test_unsupported_parameter_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + options = client_options.ClientOptions(quota_project_id="octopus") + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2137): Remove `type: ignore` once we add a version check for google-api-core. #} + with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore + client = {{ get_client(service, True) }}( + credentials={{get_credentials(True)}}, + transport="rest_asyncio", + client_options=options + ) + +{% endmacro %} + +{# get_await_prefix sets an "await" keyword + # to a method call if is_async=True. +#} +{% macro get_await_prefix(is_async) %} +{{- "await " if is_async else "" -}} +{% endmacro %} + +{# get_async_prefix sets an "async" keyword + # to a method definition if is_async=True. +#} +{% macro get_async_prefix(is_async) %} +{{- "async " if is_async else "" -}} +{% endmacro %} + +{# get_async_decorator sets a "@pytest.mark.asyncio" decorator + # to an async test method if is_async=True. +#} +{% macro get_async_decorator(is_async) %} +{{- "@pytest.mark.asyncio " if is_async else "" -}} +{% endmacro %} + +{# is_rest_unsupported_method renders: + # 'True' if transport is REST (sync or async) and method is a client_streaming method. + # 'False' otherwise. + # NOTE: There are no plans to add support for client streaming. +#} +{% macro is_rest_unsupported_method(method, is_async) %} +{%- if method.client_streaming -%} +{{'True'}} +{%- else -%} +{{'False'}} +{%- endif -%} {% endmacro %} + +{# run_transport_tests_for_config generates all the rest specific tests for both +# sync and async transport. 
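+# For example (a sketch; `GetThing` is a hypothetical method name), rendering this
+# macro with transport="rest" for a method that has `http_options` emits
+# `test_get_thing_rest_bad_request`, `test_get_thing_rest_call_success` and
+# `test_get_thing_rest_interceptors`, plus the mixin tests and
+# `test_initialize_client_w_rest`.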
+# TODO(https://github.com/googleapis/gapic-generator-python/issues/2142): Continue migrating the test cases
+# in macro::rest_required_tests into here, and then delete that macro in favor of this one.
+# TODO(https://github.com/googleapis/gapic-generator-python/issues/2153): As a follow up, migrate gRPC test cases
+# into `run_transport_tests_for_config` and make any of the rest specific macros which are called within more generic.
+#}
+{% macro run_transport_tests_for_config(service, api, transport, is_async) %}
+{% for method in service.methods.values() %}
+{% if is_rest_unsupported_method(method, is_async) == 'True' or not method.http_options %}
+{{ rest_method_not_implemented_error(service, method, transport, is_async) }}
+{% else %}
+{% if 'rest' in transport %}
+{{ bad_request_test(service, method, transport, is_async) }}
+{{ call_success_test(service, method, transport, is_async) }}
+{{ inteceptor_class_test(service, method, transport, is_async) }}
+{% endif %}{# if 'rest' in transport #}
+{% endif %}{# is_rest_unsupported_method(method, is_async) == 'False' and method.http_options #}
+{% endfor %}{# for method in service.methods.values() #}
+{% for name, sig in api.mixin_api_signatures.items() %}
+{% if 'rest' in transport %}
+{{ bad_request_mixins_test(service, api, name, sig, transport, is_async) }}
+{{ call_success_mixins_test(service, api, name, sig, transport, is_async) }}
+{% endif %}{# if 'rest' in transport #}
+{% endfor %}
+{{ initialize_client_with_transport_test(service, transport, is_async) }}
+{% endmacro %}
+
+{# rest_method_not_implemented_error generates tests for methods
+ # which are not supported for rest transport.
+#}
+{% macro rest_method_not_implemented_error(service, method, transport, is_async) %}
+{% if 'rest' in transport %}
+{% set await_prefix = get_await_prefix(is_async) %}
+{% set async_prefix = get_async_prefix(is_async) %}
+{% set async_decorator = get_async_decorator(is_async) %}
+{% set transport_name = get_transport_name(transport, is_async) %}
+{% set method_name = method.safe_name|snake_case %}
+{{async_decorator}}
+{{async_prefix}}def test_{{ method_name }}_{{transport_name}}_error():
+    {% if transport_name == 'rest_asyncio' %}
+    if not HAS_ASYNC_REST_EXTRA:
+        pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+    {% endif %}
+
+    client = {{ get_client(service, is_async) }}(
+        credentials={{get_credentials(is_async)}},
+        transport="{{transport_name}}"
+    )
+
+    with pytest.raises(NotImplementedError) as not_implemented_error:
+        {{await_prefix}}client.{{ method_name }}({})
+    assert (
+        "Method {{ method.name }} is not available over REST transport"
+        in str(not_implemented_error.value)
+    )
+
+{% endif %}{# if 'rest' in transport #}
+{% endmacro %}
+
+{# initialize_client_with_transport_test adds coverage for transport clients.
+ # Note: This test case is needed because we aren't unconditionally
+ # generating the not implemented coverage test for every client.
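+ # (If every method on a service has `http_options`, `rest_method_not_implemented_error`
+ # renders nothing, so this test is what keeps client construction covered.)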
+#}
+{% macro initialize_client_with_transport_test(service, transport, is_async) %}
+{% set transport_name = get_transport_name(transport, is_async) %}
+def test_initialize_client_w_{{transport_name}}():
+    {% if transport_name == 'rest_asyncio' %}
+    if not HAS_ASYNC_REST_EXTRA:
+        pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+    {% endif %}
+    client = {{ get_client(service, is_async) }}(
+        credentials={{get_credentials(is_async)}},
+        transport="{{transport_name}}"
+    )
+    assert client is not None
+
+{% endmacro %}
+
+{# bad_request_test generates tests for rest methods
+ # which raise a google.api_core.exceptions.BadRequest error.
+#}
+{% macro bad_request_test(service, method, transport, is_async) %}
+{% set await_prefix = get_await_prefix(is_async) %}
+{% set async_prefix = get_async_prefix(is_async) %}
+{% set async_decorator = get_async_decorator(is_async) %}
+{% set transport_name = get_transport_name(transport, is_async) %}
+{% set method_name = method.safe_name|snake_case %}
+{% set mocked_session = "AsyncAuthorizedSession" if is_async else "Session" %}
+{{ async_decorator }}
+{{ async_prefix }}def test_{{ method_name }}_{{transport_name}}_bad_request(request_type={{ method.input.ident }}):
+{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2157): Refactor this macro to include `gRPC` coverage. #}
+{% if 'grpc' in transport %}
+    raise NotImplementedError("gRPC is currently not supported for this test case.")
+{% else %}{# 'rest' in transport #}
+    {% if transport_name == 'rest_asyncio' %}
+    if not HAS_ASYNC_REST_EXTRA:
+        pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+    {% endif %}
+    client = {{ get_client(service, is_async) }}(
+        credentials={{get_credentials(is_async)}},
+        transport="{{transport_name}}"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {{ method.http_options[0].sample_request(method) }}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object({{mocked_session}}, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        {% if is_async %}
+        response_value.read = mock.AsyncMock(return_value=b'{}')
+        {% else %}
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        {% endif %}{# if is_async #}
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        {{ await_prefix }}client.{{ method_name }}(request)
+
+{% endif %}{# if 'grpc' in transport #}
+{% endmacro %}
+
+{# bad_request_mixins_test generates tests for rest mixin methods
+ # which raise a google.api_core.exceptions.BadRequest error.
+#}
+{% macro bad_request_mixins_test(service, api, name, sig, transport, is_async=False) %}
+{% set await_prefix = get_await_prefix(is_async) %}
+{% set async_prefix = get_async_prefix(is_async) %}
+{% set async_decorator = get_async_decorator(is_async) %}
+{% set transport_name = get_transport_name(transport, is_async) %}
+{% set method_name = name|snake_case %}
+{% set mocked_session = "AsyncAuthorizedSession" if is_async else "Session" %}
+{{ async_decorator }}
+{{ async_prefix }}def test_{{ method_name }}_{{ transport_name }}_bad_request(request_type={{ sig.request_type }}):
+{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2157): Refactor this macro to include `gRPC` coverage. #}
+{% if 'grpc' in transport %}
+    raise NotImplementedError("gRPC is currently not supported for this test case.")
+{% else %}{# 'rest' in transport #}
+    {% if transport_name == 'rest_asyncio' %}
+    if not HAS_ASYNC_REST_EXTRA:
+        pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+    {% endif %}
+    client = {{ get_client(service, is_async) }}(
+        credentials={{get_credentials(is_async)}},
+        transport="{{transport_name}}",
+    )
+    request = request_type()
+    request = json_format.ParseDict({{ api.mixin_http_options["{}".format(name)][0].sample_request }}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object({{mocked_session}}, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        {% if is_async %}
+        response_value = mock.Mock()
+        response_value.read = mock.AsyncMock(return_value=b'{}')
+        {% else %}
+        response_value = Response()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        {% endif %}{# if is_async #}
+        response_value.status_code = 400
+        {% if is_async %}
+        response_value.request = mock.Mock()
+        {% else %}
+        response_value.request = Request()
+        {% endif %}
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        {{ await_prefix }}client.{{ method_name }}(request)
+{% endif %}{# if 'grpc' in transport #}
+{% endmacro %}
+
+{# call_success_test generates tests for rest methods
+ # when they make a successful request.
+ # NOTE: Currently, this macro does not support the following method
+ # types: [method.paged_result_field].
+ # As support is added for the above methods, the relevant guard can be removed from within the macro.
+ # TODO(https://github.com/googleapis/gapic-generator-python/issues/2142): Clean up `rest_required_tests` as we add support for each of the method types mentioned above.
+#}
+{% macro call_success_test(service, method, transport, is_async) %}
+{% set await_prefix = get_await_prefix(is_async) %}
+{% set async_prefix = get_async_prefix(is_async) %}
+{% set async_decorator = get_async_decorator(is_async) %}
+{% set transport_name = get_transport_name(transport, is_async) %}
+{% set method_name = method.safe_name|snake_case %}
+{# NOTE: set method_output to method.extended_lro.operation_type for the following method types:
+ # (method.extended_lro and not full_extended_lro)
+#}
+{% set method_output = method.output %}
+{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2143): Update the guard below as we add support for each method, and keep it in sync with the guard in
+ # `rest_required_tests`, which should be the exact opposite.
Remove it once we have all the methods supported in async rest transport that are supported in sync rest transport. + #} +{{ async_decorator }} +@pytest.mark.parametrize("request_type", [ + {{ method.input.ident }}, + dict, +]) +{{async_prefix}}def test_{{method_name}}_{{transport_name}}_call_success(request_type): +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2157): Refactor this macro to include `gRPC` coverage. #} +{% if 'grpc' in transport %} + raise NotImplementedError("gRPC is currently not supported for this test case.") +{% else %}{# 'rest' in transport #} + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + client = {{ get_client(service, is_async) }}( + credentials={{get_credentials(is_async)}}, + transport="{{transport_name}}" + ) + + # send a request that will satisfy transcoding + request_init = {{ method.http_options[0].sample_request(method) }} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = {{ method.input.ident }}.meta.fields["{{ field.name }}"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
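+        # (A proto-plus message class does not expose `DESCRIPTOR`; that is how
+        # the check below distinguishes proto-plus types from vanilla protobuf types.)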
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["{{ field.name }}"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["{{ field.name }}"][field])): + del request_init["{{ field.name }}"][field][i][subfield] + else: + del request_init["{{ field.name }}"][field][subfield] + {% endif %} + {% endfor %} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
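+        # (Only scalar fields are populated below; message-typed fields are skipped
+        # and at most one field per oneof is set, so the mocked response stays valid.)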
+ {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.extended_lro %} + return_value = {{ method.extended_lro.operation_type.ident }}( + {% for field in method.extended_lro.operation_type.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% if (oneof_fields | rejectattr('message') | list) %} + {% with field = (oneof_fields | rejectattr('message') | first) %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endif %} + {% endfor %} + ) + {% else %} + return_value = {{ method.output.ident }}( + {% for field in method.output.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% if (oneof_fields | rejectattr('message') | list) %} + {% with field = (oneof_fields | rejectattr('message') | first) %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endif %} + {% endfor %} + ) + {% endif %}{# method.void #} + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% else %}{# method.void #} + {% if method.output.ident.is_proto_plus_type %} + + # Convert return value to protobuf type + return_value = {{ method.output.ident }}.pb(return_value) + {% endif %}{# method.output.ident.is_proto_plus_type #} + json_return_value = json_format.MessageToJson(return_value) + {% endif %}{# method.void #} + {% if method.server_streaming %} + json_return_value = "[{}]".format(json_return_value) + {% if is_async %} + response_value.content.return_value = mock_async_gen(json_return_value) + {% else %}{# not is_async #} + response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) + {% endif %}{# is_async #} + {% else %}{# not method.streaming #} + {% if is_async %} + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + {% else %}{# not is_async #} + response_value.content = json_return_value.encode('UTF-8') + {% endif %}{# is_async #} + {% endif %}{# method.server_streaming #} + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = {{ await_prefix }}client.{{ method_name }}(request) + {% if "next_page_token" in method_output.fields.values()|map(attribute='name', default="") and not method.paged_result_field %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2199): The following assert statement is added to force + # code coverage for bad paginated methods. Investigate what bad paginated methods are, and follow up on why this assertion is required. + #} + + assert response.raw_page is response + {% endif %} + + {% if method.server_streaming %} + {% if is_async %} + assert isinstance(response, AsyncIterable) + response = await response.__anext__() + {% else %} + assert isinstance(response, Iterable) + response = next(response) + {% endif %} + {% endif %} + + # Establish that the response is the type that we expect. 
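+    # (Float fields are checked with math.isclose below because JSON
+    # round-tripping through the mocked response can perturb low-order bits.)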
+    {% if method.void %}
+    assert response is None
+    {% elif method.lro %}
+    assert response.operation.name == "operations/spam"
+    {% else %}
+    {% if is_async %}
+    assert isinstance(response, {{ method.client_output_async.ident }})
+    {% else %}
+    assert isinstance(response, {{ method.client_output.ident }})
+    {% endif %}
+    {% for field in method_output.fields.values() | rejectattr('message') %}
+    {% if not field.oneof or field.proto3_optional %}
+    {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #}
+    {% if field.repeated %}
+    for index in range(len(response.{{ field.name }})):
+        assert math.isclose(
+            response.{{ field.name }}[index],
+            {{ field.mock_value }}[index],
+            rel_tol=1e-6,
+        )
+    {% else %}{# field.repeated #}
+    assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6)
+    {% endif %}{# field.repeated #}
+    {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #}
+    assert response.{{ field.name }} is {{ field.mock_value }}
+    {% else %}
+    assert response.{{ field.name }} == {{ field.mock_value }}
+    {% endif %}{# field.field_pb.type in [1, 2] #}
+    {% endif %}{# not field.oneof or field.proto3_optional #}
+    {% endfor %}{# field in method_output.fields.values() | rejectattr('message') #}
+    {% endif %}{# method.void #}
+
+{% endif %}{# if 'grpc' in transport #}
+{% endmacro %}{# call_success_test #}
+
+{# call_success_mixins_test generates tests for rest mixin methods
+ # when they make a successful request.
+#}
+{% macro call_success_mixins_test(service, api, name, sig, transport, is_async=False) %}
+{% set await_prefix = get_await_prefix(is_async) %}
+{% set async_prefix = get_async_prefix(is_async) %}
+{% set async_decorator = get_async_decorator(is_async) %}
+{% set transport_name = get_transport_name(transport, is_async) %}
+{% set method_name = name|snake_case %}
+{% set mocked_session = "AsyncAuthorizedSession" if is_async else "Session" %}
+{{ async_decorator }}
+@pytest.mark.parametrize("request_type", [
+    {{ sig.request_type }},
+    dict,
+])
+{{ async_prefix }}def test_{{ method_name }}_{{ transport_name }}(request_type):
+{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2157): Refactor this macro to include `gRPC` coverage. #}
+{% if 'grpc' in transport %}
+    raise NotImplementedError("gRPC is currently not supported for this test case.")
+{% else %}{# 'rest' in transport #}
+    {% if transport_name == 'rest_asyncio' %}
+    if not HAS_ASYNC_REST_EXTRA:
+        pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+    {% endif %}
+    client = {{ get_client(service, is_async) }}(
+        credentials={{get_credentials(is_async)}},
+        transport="{{transport_name}}",
+    )
+
+    request_init = {{ api.mixin_http_options["{}".format(name)][0].sample_request }}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object({{mocked_session}}, 'request') as req:
+        # Designate an appropriate value for the returned response.
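+        # (Mixin responses are plain protobuf messages, so `{{ sig.response_type }}()`
+        # can be serialized below with json_format without a proto-plus conversion.)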
+ {% if sig.response_type == "None" %} + return_value = None + {% else %} + return_value = {{ sig.response_type }}() + {% endif %} + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + {% if sig.response_type == "None" %} + json_return_value = '{}' + {% else %} + json_return_value = json_format.MessageToJson(return_value) + {% endif %} + {% if is_async %} + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + {% else %} + response_value.content = json_return_value.encode('UTF-8') + {% endif %} + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = {{ await_prefix }}client.{{ method_name }}(request) + + # Establish that the response is the type that we expect. + {% if sig.response_type == "None" %} + assert response is None + {% else %} + assert isinstance(response, {{ sig.response_type }}) + {% endif %} +{% endif %}{# if 'grpc' in transport #} +{% endmacro %}{# call_success_mixins_test #} + +{% macro empty_call_test(service, api, transport, is_async) %} +{% for method in service.methods.values() %}{# method #} +{% if not method.client_streaming %} +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +{{ method_call_test_generic("empty_call", method, service, api, transport, request_dict=None, is_async=is_async) }} +{% endif %}{# not method.client_streaming #} +{% endfor %}{# method in service.methods.values() #} +{% endmacro %}{# empty_call_test #} + +{% macro get_uuid4_re() -%} +[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12} +{%- endmacro %}{# uuid_re #} + +{% macro routing_parameter_test(service, api, transport, is_async) %} +{% for method in service.methods.values() %}{# method #} +{% if method.explicit_routing %} +{# Any value that is part of the HTTP/1.1 URI should be sent as #} +{# a field header. Set these to a non-empty value. #} +{% for routing_param in method.routing_rule.routing_parameters %} +{{ method_call_test_generic("routing_parameters_request_" + loop.index|string, method, service, api, transport, request_dict=routing_param.sample_request, is_async=is_async, routing_param=routing_param) }} +{% endfor %}{# routing_param in method.routing_rule.routing_parameters #} +{% endif %}{# method.explicit_routing #} +{% endfor %}{# method in service.methods.values() #} +{% endmacro %}{# routing_parameter_test #} + +{# inteceptor_class_test generates tests for rest interceptors. #} +{% macro inteceptor_class_test(service, method, transport, is_async) %} +{% set await_prefix = get_await_prefix(is_async) %} +{% set async_prefix = get_async_prefix(is_async) %} +{% set async_decorator = get_async_decorator(is_async) %} +{% set transport_name = get_transport_name(transport, is_async) %} +{% set method_name = method.safe_name|snake_case %} +{% set async_method_prefix = "Async" if is_async else "" %} +{{async_decorator}} +@pytest.mark.parametrize("null_interceptor", [True, False]) +{{async_prefix}}def test_{{ method_name }}_{{transport_name}}_interceptors(null_interceptor): +{% if 'grpc' in transport %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2191): + # Currently this macro only supports REST. It should be updated to support gRPC + # transport as well. 
+ #} + raise NotImplementedError("gRPC is currently not supported for this test case.") +{% else %}{# 'rest' in transport #} + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + transport = transports.{{async_method_prefix}}{{ service.name }}RestTransport( + credentials={{get_credentials(is_async)}}, + interceptor=None if null_interceptor else transports.{{async_method_prefix}}{{ service.name}}RestInterceptor(), + ) + client = {{ get_client(service, is_async) }}(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + {% if method.lro %} + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + {% endif %} + {% if not method.void %} + mock.patch.object(transports.{{async_method_prefix}}{{ service.name }}RestInterceptor, "post_{{method.name|snake_case}}") as post, \ + mock.patch.object(transports.{{async_method_prefix}}{{ service.name }}RestInterceptor, "post_{{method.name|snake_case}}_with_metadata") as post_with_metadata, \ + {% endif %} + mock.patch.object(transports.{{async_method_prefix}}{{ service.name }}RestInterceptor, "pre_{{ method.name|snake_case }}") as pre: + pre.assert_not_called() + {% if not method.void %} + post.assert_not_called() + post_with_metadata.assert_not_called() + {% endif %} + {% if method.input.ident.is_proto_plus_type %} + pb_message = {{ method.input.ident }}.pb({{ method.input.ident }}()) + {% else %} + pb_message = {{ method.input.ident }}() + {% endif %}{# if method.input.ident.is_proto_plus_type #} + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + {% if not method.void %} + return_value = {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} + + {% if method.server_streaming %} + {% if is_async %} + req.return_value.content.return_value = mock_async_gen(return_value) + {% else %}{# not is_async #} + req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) + {% endif %}{# end is_async #} + {% else %}{# not method.server_streaming #} + {% if is_async %} + req.return_value.read = mock.AsyncMock(return_value=return_value) + {% else %}{# not is_async #} + req.return_value.content = return_value + {% endif %}{# end is_async #} + {% endif %}{# end method.server_streaming #} + {% endif %}{# end not method.void #} + + request = {{ method.input.ident }}() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + {% if not method.void %} + post.return_value = {{ method.output.ident }}() + post_with_metadata.return_value = {{ method.output.ident }}(), metadata + {% endif %} + + {{await_prefix}}client.{{ method_name }}(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + {% if not method.void %} + post.assert_called_once() + post_with_metadata.assert_called_once() + {% endif %} +{% endif %}{# end 'grpc' in transport #} +{% endmacro%}{# inteceptor_class_test #} diff --git a/gapic/utils/reserved_names.py b/gapic/utils/reserved_names.py index 
0914cb02e3..6aac107420 100644
--- a/gapic/utils/reserved_names.py
+++ b/gapic/utils/reserved_names.py
@@ -89,6 +89,8 @@
         "exec",
         "help",
         # Comes from Protoplus
-        "ignore_unknown_fields"
+        "ignore_unknown_fields",
+        "self",  # Refer to PEP8 https://peps.python.org/pep-0008/#function-and-method-arguments
+        "cls",  # Refer to PEP8 https://peps.python.org/pep-0008/#function-and-method-arguments
     ]
 )
diff --git a/noxfile.py b/noxfile.py
index 0a28618b4e..69af653873 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -40,6 +40,7 @@
     "3.10",
     "3.11",
     "3.12",
+    "3.13",
 )

 NEWEST_PYTHON = ALL_PYTHON[-1]
@@ -304,7 +305,19 @@ def showcase_library(
             f"{tmp_dir}/testing/constraints-{session.python}.txt"
         )
         # Install the library with a constraints file.
-        session.install("-e", tmp_dir, "-r", constraints_path)
+        if session.python == "3.7":
+            session.install("-e", tmp_dir, "-r", constraints_path)
+            if rest_async_io_enabled:
+                # NOTE: We re-install `google-api-core` and `google-auth` to override the respective
+                # versions specified in constraints-3.7.txt. This is needed because async REST
+                # is not supported with the minimum version of `google-api-core` and `google-auth`.
+                # TODO(https://github.com/googleapis/gapic-generator-python/issues/2211): Remove hardcoded dependencies
+                # from here and add a new constraints file for testing the minimum supported versions of the async REST feature.
+                session.install('--no-cache-dir', '--force-reinstall', "google-api-core[grpc, async_rest]==2.21.0")
+                # session.install('--no-cache-dir', '--force-reinstall', "google-api-core==2.20.0")
+                session.install('--no-cache-dir', '--force-reinstall', "google-auth[aiohttp]==2.35.0")
+        else:
+            session.install("-e", tmp_dir + ("[async_rest]" if rest_async_io_enabled else ""), "-r", constraints_path)
     else:
         # The ads templates do not have constraints files.
         # See https://github.com/googleapis/gapic-generator-python/issues/1788
@@ -341,6 +354,33 @@ def showcase(
         env=env,
     )

+@nox.session(python=ALL_PYTHON)
+def showcase_w_rest_async(
+    session,
+    templates="DEFAULT",
+    other_opts: typing.Iterable[str] = (),
+    env: typing.Optional[typing.Dict[str, str]] = {},
+):
+    """Run the Showcase test suite."""
+
+    with showcase_library(session, templates=templates, other_opts=other_opts, rest_async_io_enabled=True):
+        session.install("pytest", "pytest-asyncio")
+        test_directory = Path("tests", "system")
+        ignore_file = env.get("IGNORE_FILE")
+        pytest_command = [
+            "py.test",
+            "--quiet",
+            *(session.posargs or [str(test_directory)]),
+        ]
+        if ignore_file:
+            ignore_path = test_directory / ignore_file
+            pytest_command.extend(["--ignore", str(ignore_path)])
+
+        session.run(
+            *pytest_command,
+            env=env,
+        )
+

 @nox.session(python=NEWEST_PYTHON)
 def showcase_mtls(
@@ -393,7 +433,7 @@ def showcase_mtls_alternative_templates(session):
     )


-def run_showcase_unit_tests(session, fail_under=100):
+def run_showcase_unit_tests(session, fail_under=100, rest_async_io_enabled=False):
     session.install(
         "coverage",
         "pytest",
@@ -402,22 +442,38 @@
         "asyncmock; python_version < '3.8'",
         "pytest-asyncio",
     )
-    # Run the tests.
-    session.run(
-        "py.test",
-        *(
-            session.posargs
-            or [
-                "-n=auto",
-                "--quiet",
-                "--cov=google",
-                "--cov-append",
-                f"--cov-fail-under={str(fail_under)}",
-                path.join("tests", "unit"),
-            ]
-        ),
-    )
+    # NOTE: async rest is not supported against the minimum supported version of google-api-core.
+    # Therefore, we ignore the coverage requirement in this case.
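+    # For example (assuming nox's `session-python` selection syntax), running
+    # `nox -s showcase_unit_w_rest_async-3.7` exercises this reduced-coverage path.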
+ if session.python == "3.7" and rest_async_io_enabled: + session.run( + "py.test", + *( + session.posargs + or [ + "-n=auto", + "--quiet", + "--cov=google", + "--cov-append", + path.join("tests", "unit"), + ] + ), + ) + else: + session.run( + "py.test", + *( + session.posargs + or [ + "-n=auto", + "--quiet", + "--cov=google", + "--cov-append", + f"--cov-fail-under={str(fail_under)}", + path.join("tests", "unit"), + ] + ), + ) @nox.session(python=ALL_PYTHON) @@ -440,7 +496,7 @@ def showcase_unit_w_rest_async( """Run the generated unit tests with async rest transport against the Showcase library.""" with showcase_library(session, templates=templates, other_opts=other_opts, rest_async_io_enabled=True) as lib: session.chdir(lib) - run_showcase_unit_tests(session) + run_showcase_unit_tests(session, rest_async_io_enabled=True) @nox.session(python=ALL_PYTHON) diff --git a/owlbot.py b/owlbot.py index dcb31e46eb..3dcd7f1516 100644 --- a/owlbot.py +++ b/owlbot.py @@ -21,13 +21,6 @@ excludes=["samples/**/*", "test-samples*", "publish-docs.sh", "*/prerelease-deps.cfg"], ) -# remove docfx build -assert 1 == s.replace( - ".kokoro/docs/docs-presubmit.cfg", - 'value: "docs docfx"', - 'value: "docs"', -) - # needed for docs build s.move(templated_files / ".trampolinerc") diff --git a/requirements.in b/requirements.in index db955dc7ec..d6a1c8d6f1 100644 --- a/requirements.in +++ b/requirements.in @@ -11,3 +11,4 @@ proto-plus pytest-asyncio libcst inflection +aiohttp diff --git a/requirements.txt b/requirements.txt index ab28e808cc..eda186e271 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,107 +2,216 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --generate-hashes requirements.in +# pip-compile --allow-unsafe --generate-hashes requirements.in # +aiohappyeyeballs==2.4.4 \ + --hash=sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745 \ + --hash=sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8 + # via aiohttp +aiohttp==3.11.10 \ + --hash=sha256:012f176945af138abc10c4a48743327a92b4ca9adc7a0e078077cdb5dbab7be0 \ + --hash=sha256:02c13415b5732fb6ee7ff64583a5e6ed1c57aa68f17d2bda79c04888dfdc2769 \ + --hash=sha256:03b6002e20938fc6ee0918c81d9e776bebccc84690e2b03ed132331cca065ee5 \ + --hash=sha256:04814571cb72d65a6899db6099e377ed00710bf2e3eafd2985166f2918beaf59 \ + --hash=sha256:0580f2e12de2138f34debcd5d88894786453a76e98febaf3e8fe5db62d01c9bf \ + --hash=sha256:06a8e2ee1cbac16fe61e51e0b0c269400e781b13bcfc33f5425912391a542985 \ + --hash=sha256:076bc454a7e6fd646bc82ea7f98296be0b1219b5e3ef8a488afbdd8e81fbac50 \ + --hash=sha256:0c9527819b29cd2b9f52033e7fb9ff08073df49b4799c89cb5754624ecd98299 \ + --hash=sha256:0dc49f42422163efb7e6f1df2636fe3db72713f6cd94688e339dbe33fe06d61d \ + --hash=sha256:14cdb5a9570be5a04eec2ace174a48ae85833c2aadc86de68f55541f66ce42ab \ + --hash=sha256:15fccaf62a4889527539ecb86834084ecf6e9ea70588efde86e8bc775e0e7542 \ + --hash=sha256:24213ba85a419103e641e55c27dc7ff03536c4873470c2478cce3311ba1eee7b \ + --hash=sha256:31d5093d3acd02b31c649d3a69bb072d539d4c7659b87caa4f6d2bcf57c2fa2b \ + --hash=sha256:3691ed7726fef54e928fe26344d930c0c8575bc968c3e239c2e1a04bd8cf7838 \ + --hash=sha256:386fbe79863eb564e9f3615b959e28b222259da0c48fd1be5929ac838bc65683 \ + --hash=sha256:3bbbfff4c679c64e6e23cb213f57cc2c9165c9a65d63717108a644eb5a7398df \ + --hash=sha256:3de34936eb1a647aa919655ff8d38b618e9f6b7f250cc19a57a4bf7fd2062b6d \ + --hash=sha256:40d1c7a7f750b5648642586ba7206999650208dbe5afbcc5284bcec6579c9b91 \ + 
--hash=sha256:44224d815853962f48fe124748227773acd9686eba6dc102578defd6fc99e8d9 \ + --hash=sha256:47ad15a65fb41c570cd0ad9a9ff8012489e68176e7207ec7b82a0940dddfd8be \ + --hash=sha256:482cafb7dc886bebeb6c9ba7925e03591a62ab34298ee70d3dd47ba966370d2c \ + --hash=sha256:49c7dbbc1a559ae14fc48387a115b7d4bbc84b4a2c3b9299c31696953c2a5219 \ + --hash=sha256:4b2c7ac59c5698a7a8207ba72d9e9c15b0fc484a560be0788b31312c2c5504e4 \ + --hash=sha256:4cca22a61b7fe45da8fc73c3443150c3608750bbe27641fc7558ec5117b27fdf \ + --hash=sha256:4cfce37f31f20800a6a6620ce2cdd6737b82e42e06e6e9bd1b36f546feb3c44f \ + --hash=sha256:502a1464ccbc800b4b1995b302efaf426e8763fadf185e933c2931df7db9a199 \ + --hash=sha256:53bf2097e05c2accc166c142a2090e4c6fd86581bde3fd9b2d3f9e93dda66ac1 \ + --hash=sha256:593c114a2221444f30749cc5e5f4012488f56bd14de2af44fe23e1e9894a9c60 \ + --hash=sha256:5d6958671b296febe7f5f859bea581a21c1d05430d1bbdcf2b393599b1cdce77 \ + --hash=sha256:5ef359ebc6949e3a34c65ce20230fae70920714367c63afd80ea0c2702902ccf \ + --hash=sha256:613e5169f8ae77b1933e42e418a95931fb4867b2991fc311430b15901ed67079 \ + --hash=sha256:61b9bae80ed1f338c42f57c16918853dc51775fb5cb61da70d590de14d8b5fb4 \ + --hash=sha256:6362cc6c23c08d18ddbf0e8c4d5159b5df74fea1a5278ff4f2c79aed3f4e9f46 \ + --hash=sha256:65a96e3e03300b41f261bbfd40dfdbf1c301e87eab7cd61c054b1f2e7c89b9e8 \ + --hash=sha256:65e55ca7debae8faaffee0ebb4b47a51b4075f01e9b641c31e554fd376595c6c \ + --hash=sha256:68386d78743e6570f054fe7949d6cb37ef2b672b4d3405ce91fafa996f7d9b4d \ + --hash=sha256:68ff6f48b51bd78ea92b31079817aff539f6c8fc80b6b8d6ca347d7c02384e33 \ + --hash=sha256:6ab29b8a0beb6f8eaf1e5049252cfe74adbaafd39ba91e10f18caeb0e99ffb34 \ + --hash=sha256:77ae58586930ee6b2b6f696c82cf8e78c8016ec4795c53e36718365f6959dc82 \ + --hash=sha256:77c4aa15a89847b9891abf97f3d4048f3c2d667e00f8a623c89ad2dccee6771b \ + --hash=sha256:78153314f26d5abef3239b4a9af20c229c6f3ecb97d4c1c01b22c4f87669820c \ + --hash=sha256:7852bbcb4d0d2f0c4d583f40c3bc750ee033265d80598d0f9cb6f372baa6b836 \ + --hash=sha256:7e97d622cb083e86f18317282084bc9fbf261801b0192c34fe4b1febd9f7ae69 \ + --hash=sha256:7f3dc0e330575f5b134918976a645e79adf333c0a1439dcf6899a80776c9ab39 \ + --hash=sha256:80886dac673ceaef499de2f393fc80bb4481a129e6cb29e624a12e3296cc088f \ + --hash=sha256:811f23b3351ca532af598405db1093f018edf81368e689d1b508c57dcc6b6a32 \ + --hash=sha256:86a5dfcc39309470bd7b68c591d84056d195428d5d2e0b5ccadfbaf25b026ebc \ + --hash=sha256:8b3cf2dc0f0690a33f2d2b2cb15db87a65f1c609f53c37e226f84edb08d10f52 \ + --hash=sha256:8cc5203b817b748adccb07f36390feb730b1bc5f56683445bfe924fc270b8816 \ + --hash=sha256:909af95a72cedbefe5596f0bdf3055740f96c1a4baa0dd11fd74ca4de0b4e3f1 \ + --hash=sha256:974d3a2cce5fcfa32f06b13ccc8f20c6ad9c51802bb7f829eae8a1845c4019ec \ + --hash=sha256:98283b94cc0e11c73acaf1c9698dea80c830ca476492c0fe2622bd931f34b487 \ + --hash=sha256:98f5635f7b74bcd4f6f72fcd85bea2154b323a9f05226a80bc7398d0c90763b0 \ + --hash=sha256:99b7920e7165be5a9e9a3a7f1b680f06f68ff0d0328ff4079e5163990d046767 \ + --hash=sha256:9bca390cb247dbfaec3c664326e034ef23882c3f3bfa5fbf0b56cad0320aaca5 \ + --hash=sha256:9e2e576caec5c6a6b93f41626c9c02fc87cd91538b81a3670b2e04452a63def6 \ + --hash=sha256:9ef405356ba989fb57f84cac66f7b0260772836191ccefbb987f414bcd2979d9 \ + --hash=sha256:a55d2ad345684e7c3dd2c20d2f9572e9e1d5446d57200ff630e6ede7612e307f \ + --hash=sha256:ab7485222db0959a87fbe8125e233b5a6f01f4400785b36e8a7878170d8c3138 \ + --hash=sha256:b1fc6b45010a8d0ff9e88f9f2418c6fd408c99c211257334aff41597ebece42e \ + 
--hash=sha256:b78f053a7ecfc35f0451d961dacdc671f4bcbc2f58241a7c820e9d82559844cf \ + --hash=sha256:b99acd4730ad1b196bfb03ee0803e4adac371ae8efa7e1cbc820200fc5ded109 \ + --hash=sha256:be2b516f56ea883a3e14dda17059716593526e10fb6303189aaf5503937db408 \ + --hash=sha256:beb39a6d60a709ae3fb3516a1581777e7e8b76933bb88c8f4420d875bb0267c6 \ + --hash=sha256:bf3d1a519a324af764a46da4115bdbd566b3c73fb793ffb97f9111dbc684fc4d \ + --hash=sha256:c49a76c1038c2dd116fa443eba26bbb8e6c37e924e2513574856de3b6516be99 \ + --hash=sha256:c5532f0441fc09c119e1dca18fbc0687e64fbeb45aa4d6a87211ceaee50a74c4 \ + --hash=sha256:c6b9e6d7e41656d78e37ce754813fa44b455c3d0d0dced2a047def7dc5570b74 \ + --hash=sha256:c87bf31b7fdab94ae3adbe4a48e711bfc5f89d21cf4c197e75561def39e223bc \ + --hash=sha256:cbad88a61fa743c5d283ad501b01c153820734118b65aee2bd7dbb735475ce0d \ + --hash=sha256:cf14627232dfa8730453752e9cdc210966490992234d77ff90bc8dc0dce361d5 \ + --hash=sha256:db1d0b28fcb7f1d35600150c3e4b490775251dea70f894bf15c678fdd84eda6a \ + --hash=sha256:ddf5f7d877615f6a1e75971bfa5ac88609af3b74796ff3e06879e8422729fd01 \ + --hash=sha256:e44a9a3c053b90c6f09b1bb4edd880959f5328cf63052503f892c41ea786d99f \ + --hash=sha256:efb15a17a12497685304b2d976cb4939e55137df7b09fa53f1b6a023f01fcb4e \ + --hash=sha256:fbbaea811a2bba171197b08eea288b9402faa2bab2ba0858eecdd0a4105753a3 + # via -r requirements.in +aiosignal==1.3.2 \ + --hash=sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5 \ + --hash=sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54 + # via aiohttp +async-timeout==5.0.1 \ + --hash=sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c \ + --hash=sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3 + # via aiohttp +attrs==24.3.0 \ + --hash=sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff \ + --hash=sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308 + # via aiohttp cachetools==5.5.0 \ --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a # via google-auth -certifi==2024.8.30 \ - --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ - --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 +certifi==2024.12.14 \ + --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ + --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db # via requests -charset-normalizer==3.3.2 \ - --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ - --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \ - --hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \ - --hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \ - --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \ - --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \ - --hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \ - --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \ - --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \ - --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \ - --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \ - 
--hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 \ - --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \ - --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \ - --hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \ - --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \ - --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \ - --hash=sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc \ - --hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \ - --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \ - --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \ - --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \ - --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \ - --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \ - --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \ - --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \ - --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \ - --hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \ - --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \ - --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \ - --hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \ - --hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \ - --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \ - --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \ - --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \ - --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \ - --hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \ - --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \ - --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \ - --hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \ - --hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \ - --hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \ - --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \ - --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \ - --hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \ - --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \ - --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \ - --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \ - --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \ - --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \ - --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \ - --hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \ - --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \ - 
--hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \ - --hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \ - --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \ - --hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \ - --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \ - --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \ - --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \ - --hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \ - --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \ - --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \ - --hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \ - --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \ - --hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \ - --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \ - --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \ - --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \ - --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \ - --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \ - --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \ - --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \ - --hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \ - --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \ - --hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \ - --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \ - --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \ - --hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \ - --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \ - --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \ - --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \ - --hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \ - --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \ - --hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \ - --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \ - --hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \ - --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \ - --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ - --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 +charset-normalizer==3.4.0 \ + --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \ + --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \ + --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \ + --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \ + --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \ + 
--hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \ + --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \ + --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \ + --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \ + --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \ + --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \ + --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \ + --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \ + --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \ + --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \ + --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \ + --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \ + --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \ + --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \ + --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \ + --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \ + --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \ + --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \ + --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \ + --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \ + --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \ + --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \ + --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \ + --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \ + --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \ + --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \ + --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \ + --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \ + --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \ + --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \ + --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \ + --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \ + --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \ + --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \ + --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \ + --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \ + --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \ + --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \ + --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \ + --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \ + --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \ + --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \ + 
--hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \ + --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \ + --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \ + --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \ + --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \ + --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \ + --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \ + --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \ + --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \ + --hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \ + --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \ + --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \ + --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \ + --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \ + --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \ + --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \ + --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \ + --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \ + --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \ + --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \ + --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \ + --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \ + --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \ + --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \ + --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \ + --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \ + --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \ + --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \ + --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \ + --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \ + --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \ + --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \ + --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \ + --hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \ + --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \ + --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \ + --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \ + --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \ + --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \ + --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \ + --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \ + --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \ + 
--hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \ + --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \ + --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \ + --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \ + --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \ + --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \ + --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \ + --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \ + --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \ + --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \ + --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \ + --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \ + --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \ + --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \ + --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \ + --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482 # via requests click==8.1.7 \ --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ @@ -112,17 +221,113 @@ exceptiongroup==1.2.2 \ --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc # via pytest -google-api-core==2.19.2 \ - --hash=sha256:53ec0258f2837dd53bbd3d3df50f5359281b3cc13f800c941dd15a9b5a415af4 \ - --hash=sha256:ca07de7e8aa1c98a8bfca9321890ad2340ef7f2eb136e558cee68f24b94b0a8f +frozenlist==1.5.0 \ + --hash=sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e \ + --hash=sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf \ + --hash=sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6 \ + --hash=sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a \ + --hash=sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d \ + --hash=sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f \ + --hash=sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28 \ + --hash=sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b \ + --hash=sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9 \ + --hash=sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2 \ + --hash=sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec \ + --hash=sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2 \ + --hash=sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c \ + --hash=sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336 \ + --hash=sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4 \ + --hash=sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d \ + --hash=sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b \ + --hash=sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c \ + --hash=sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10 \ + --hash=sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08 \ + 
--hash=sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942 \ + --hash=sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8 \ + --hash=sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f \ + --hash=sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10 \ + --hash=sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5 \ + --hash=sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6 \ + --hash=sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21 \ + --hash=sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c \ + --hash=sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d \ + --hash=sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923 \ + --hash=sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608 \ + --hash=sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de \ + --hash=sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17 \ + --hash=sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0 \ + --hash=sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f \ + --hash=sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641 \ + --hash=sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c \ + --hash=sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a \ + --hash=sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0 \ + --hash=sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9 \ + --hash=sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab \ + --hash=sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f \ + --hash=sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3 \ + --hash=sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a \ + --hash=sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784 \ + --hash=sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604 \ + --hash=sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d \ + --hash=sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5 \ + --hash=sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03 \ + --hash=sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e \ + --hash=sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953 \ + --hash=sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee \ + --hash=sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d \ + --hash=sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817 \ + --hash=sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3 \ + --hash=sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039 \ + --hash=sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f \ + --hash=sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9 \ + --hash=sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf \ + --hash=sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76 \ + --hash=sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba \ + --hash=sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171 \ + 
--hash=sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb \ + --hash=sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439 \ + --hash=sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631 \ + --hash=sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972 \ + --hash=sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d \ + --hash=sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869 \ + --hash=sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9 \ + --hash=sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411 \ + --hash=sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723 \ + --hash=sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2 \ + --hash=sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b \ + --hash=sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99 \ + --hash=sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e \ + --hash=sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840 \ + --hash=sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3 \ + --hash=sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb \ + --hash=sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3 \ + --hash=sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0 \ + --hash=sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca \ + --hash=sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45 \ + --hash=sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e \ + --hash=sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f \ + --hash=sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5 \ + --hash=sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307 \ + --hash=sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e \ + --hash=sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2 \ + --hash=sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778 \ + --hash=sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a \ + --hash=sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30 \ + --hash=sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a + # via + # aiohttp + # aiosignal +google-api-core==2.24.0 \ + --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ + --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf # via -r requirements.in -google-auth==2.34.0 \ - --hash=sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 \ - --hash=sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc +google-auth==2.37.0 \ + --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ + --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 # via google-api-core -googleapis-common-protos[grpc]==1.65.0 \ - --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ - --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 +googleapis-common-protos[grpc]==1.66.0 \ + --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ + --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed # via # -r 
requirements.in # google-api-core @@ -131,60 +336,71 @@ grpc-google-iam-v1==0.13.1 \ --hash=sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001 \ --hash=sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e # via -r requirements.in -grpcio==1.66.1 \ - --hash=sha256:0e6c9b42ded5d02b6b1fea3a25f036a2236eeb75d0579bfd43c0018c88bf0a3e \ - --hash=sha256:161d5c535c2bdf61b95080e7f0f017a1dfcb812bf54093e71e5562b16225b4ce \ - --hash=sha256:17663598aadbedc3cacd7bbde432f541c8e07d2496564e22b214b22c7523dac8 \ - --hash=sha256:1c17ebcec157cfb8dd445890a03e20caf6209a5bd4ac5b040ae9dbc59eef091d \ - --hash=sha256:292a846b92cdcd40ecca46e694997dd6b9be6c4c01a94a0dfb3fcb75d20da858 \ - --hash=sha256:2ca2559692d8e7e245d456877a85ee41525f3ed425aa97eb7a70fc9a79df91a0 \ - --hash=sha256:307b1d538140f19ccbd3aed7a93d8f71103c5d525f3c96f8616111614b14bf2a \ - --hash=sha256:30a1c2cf9390c894c90bbc70147f2372130ad189cffef161f0432d0157973f45 \ - --hash=sha256:31a049daa428f928f21090403e5d18ea02670e3d5d172581670be006100db9ef \ - --hash=sha256:35334f9c9745add3e357e3372756fd32d925bd52c41da97f4dfdafbde0bf0ee2 \ - --hash=sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac \ - --hash=sha256:3885f037eb11f1cacc41f207b705f38a44b69478086f40608959bf5ad85826dd \ - --hash=sha256:4573608e23f7e091acfbe3e84ac2045680b69751d8d67685ffa193a4429fedb1 \ - --hash=sha256:4825a3aa5648010842e1c9d35a082187746aa0cdbf1b7a2a930595a94fb10fce \ - --hash=sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492 \ - --hash=sha256:48b0d92d45ce3be2084b92fb5bae2f64c208fea8ceed7fccf6a7b524d3c4942e \ - --hash=sha256:4d813316d1a752be6f5c4360c49f55b06d4fe212d7df03253dfdae90c8a402bb \ - --hash=sha256:5dd67ed9da78e5121efc5c510f0122a972216808d6de70953a740560c572eb44 \ - --hash=sha256:6f914386e52cbdeb5d2a7ce3bf1fdfacbe9d818dd81b6099a05b741aaf3848bb \ - --hash=sha256:7101db1bd4cd9b880294dec41a93fcdce465bdbb602cd8dc5bd2d6362b618759 \ - --hash=sha256:7e06aa1f764ec8265b19d8f00140b8c4b6ca179a6dc67aa9413867c47e1fb04e \ - --hash=sha256:84ca1be089fb4446490dd1135828bd42a7c7f8421e74fa581611f7afdf7ab761 \ - --hash=sha256:8a1e224ce6f740dbb6b24c58f885422deebd7eb724aff0671a847f8951857c26 \ - --hash=sha256:97ae7edd3f3f91480e48ede5d3e7d431ad6005bfdbd65c1b56913799ec79e791 \ - --hash=sha256:9c9bebc6627873ec27a70fc800f6083a13c70b23a5564788754b9ee52c5aef6c \ - --hash=sha256:a013c5fbb12bfb5f927444b477a26f1080755a931d5d362e6a9a720ca7dbae60 \ - --hash=sha256:a66fe4dc35d2330c185cfbb42959f57ad36f257e0cc4557d11d9f0a3f14311df \ - --hash=sha256:a92c4f58c01c77205df6ff999faa008540475c39b835277fb8883b11cada127a \ - --hash=sha256:aa8ba945c96e73de29d25331b26f3e416e0c0f621e984a3ebdb2d0d0b596a3b3 \ - --hash=sha256:b0aa03d240b5539648d996cc60438f128c7f46050989e35b25f5c18286c86734 \ - --hash=sha256:b1b24c23d51a1e8790b25514157d43f0a4dce1ac12b3f0b8e9f66a5e2c4c132f \ - --hash=sha256:b7ffb8ea674d68de4cac6f57d2498fef477cef582f1fa849e9f844863af50083 \ - --hash=sha256:b9feb4e5ec8dc2d15709f4d5fc367794d69277f5d680baf1910fc9915c633524 \ - --hash=sha256:bff2096bdba686019fb32d2dde45b95981f0d1490e054400f70fc9a8af34b49d \ - --hash=sha256:c30aeceeaff11cd5ddbc348f37c58bcb96da8d5aa93fed78ab329de5f37a0d7a \ - --hash=sha256:c9f80f9fad93a8cf71c7f161778ba47fd730d13a343a46258065c4deb4b550c0 \ - --hash=sha256:cfd349de4158d797db2bd82d2020554a121674e98fbe6b15328456b3bf2495bb \ - --hash=sha256:d0cd7050397b3609ea51727b1811e663ffda8bda39c6a5bb69525ef12414b503 \ - --hash=sha256:d639c939ad7c440c7b2819a28d559179a4508783f7e5b991166f8d7a34b52815 \ - 
--hash=sha256:e3ba04659e4fce609de2658fe4dbf7d6ed21987a94460f5f92df7579fd5d0e22 \ - --hash=sha256:ecfe735e7a59e5a98208447293ff8580e9db1e890e232b8b292dc8bd15afc0d2 \ - --hash=sha256:ef82d361ed5849d34cf09105d00b94b6728d289d6b9235513cb2fcc79f7c432c \ - --hash=sha256:f03a5884c56256e08fd9e262e11b5cfacf1af96e2ce78dc095d2c41ccae2c80d \ - --hash=sha256:f1fe60d0772831d96d263b53d83fb9a3d050a94b0e94b6d004a5ad111faa5b5b \ - --hash=sha256:f517fd7259fe823ef3bd21e508b653d5492e706e9f0ef82c16ce3347a8a5620c \ - --hash=sha256:fdb14bad0835914f325349ed34a51940bc2ad965142eb3090081593c6e347be9 +grpcio==1.68.1 \ + --hash=sha256:025f790c056815b3bf53da850dd70ebb849fd755a4b1ac822cb65cd631e37d43 \ + --hash=sha256:04cfd68bf4f38f5bb959ee2361a7546916bd9a50f78617a346b3aeb2b42e2161 \ + --hash=sha256:0feb02205a27caca128627bd1df4ee7212db051019a9afa76f4bb6a1a80ca95e \ + --hash=sha256:1098f03dedc3b9810810568060dea4ac0822b4062f537b0f53aa015269be0a76 \ + --hash=sha256:12941d533f3cd45d46f202e3667be8ebf6bcb3573629c7ec12c3e211d99cfccf \ + --hash=sha256:255b1635b0ed81e9f91da4fcc8d43b7ea5520090b9a9ad9340d147066d1d3613 \ + --hash=sha256:298ee7f80e26f9483f0b6f94cc0a046caf54400a11b644713bb5b3d8eb387600 \ + --hash=sha256:2c4cec6177bf325eb6faa6bd834d2ff6aa8bb3b29012cceb4937b86f8b74323c \ + --hash=sha256:2cc1fd04af8399971bcd4f43bd98c22d01029ea2e56e69c34daf2bf8470e47f5 \ + --hash=sha256:334ab917792904245a028f10e803fcd5b6f36a7b2173a820c0b5b076555825e1 \ + --hash=sha256:3522c77d7e6606d6665ec8d50e867f13f946a4e00c7df46768f1c85089eae515 \ + --hash=sha256:37ea3be171f3cf3e7b7e412a98b77685eba9d4fd67421f4a34686a63a65d99f9 \ + --hash=sha256:390eee4225a661c5cd133c09f5da1ee3c84498dc265fd292a6912b65c421c78c \ + --hash=sha256:3aed6544e4d523cd6b3119b0916cef3d15ef2da51e088211e4d1eb91a6c7f4f1 \ + --hash=sha256:3ceb56c4285754e33bb3c2fa777d055e96e6932351a3082ce3559be47f8024f0 \ + --hash=sha256:44a8502dd5de653ae6a73e2de50a401d84184f0331d0ac3daeb044e66d5c5054 \ + --hash=sha256:4b177f5547f1b995826ef529d2eef89cca2f830dd8b2c99ffd5fde4da734ba73 \ + --hash=sha256:4efac5481c696d5cb124ff1c119a78bddbfdd13fc499e3bc0ca81e95fc573684 \ + --hash=sha256:52fbf85aa71263380d330f4fce9f013c0798242e31ede05fcee7fbe40ccfc20d \ + --hash=sha256:55857c71641064f01ff0541a1776bfe04a59db5558e82897d35a7793e525774c \ + --hash=sha256:66a24f3d45c33550703f0abb8b656515b0ab777970fa275693a2f6dc8e35f1c1 \ + --hash=sha256:6ab2d912ca39c51f46baf2a0d92aa265aa96b2443266fc50d234fa88bf877d8e \ + --hash=sha256:77d65165fc35cff6e954e7fd4229e05ec76102d4406d4576528d3a3635fc6172 \ + --hash=sha256:7dfc914cc31c906297b30463dde0b9be48e36939575eaf2a0a22a8096e69afe5 \ + --hash=sha256:7f20ebec257af55694d8f993e162ddf0d36bd82d4e57f74b31c67b3c6d63d8b2 \ + --hash=sha256:80af6f1e69c5e68a2be529990684abdd31ed6622e988bf18850075c81bb1ad6e \ + --hash=sha256:83bbf5807dc3ee94ce1de2dfe8a356e1d74101e4b9d7aa8c720cc4818a34aded \ + --hash=sha256:8720c25cd9ac25dd04ee02b69256d0ce35bf8a0f29e20577427355272230965a \ + --hash=sha256:8829924fffb25386995a31998ccbbeaa7367223e647e0122043dfc485a87c666 \ + --hash=sha256:8a3869a6661ec8f81d93f4597da50336718bde9eb13267a699ac7e0a1d6d0bea \ + --hash=sha256:8cb620037a2fd9eeee97b4531880e439ebfcd6d7d78f2e7dcc3726428ab5ef63 \ + --hash=sha256:919d7f18f63bcad3a0f81146188e90274fde800a94e35d42ffe9eadf6a9a6330 \ + --hash=sha256:95c87ce2a97434dffe7327a4071839ab8e8bffd0054cc74cbe971fba98aedd60 \ + --hash=sha256:963cc8d7d79b12c56008aabd8b457f400952dbea8997dd185f155e2f228db079 \ + --hash=sha256:96f473cdacfdd506008a5d7579c9f6a7ff245a9ade92c3c0265eb76cc591914f \ + 
--hash=sha256:9d1fae6bbf0816415b81db1e82fb3bf56f7857273c84dcbe68cbe046e58e1ccd \ + --hash=sha256:a0c8ddabef9c8f41617f213e527254c41e8b96ea9d387c632af878d05db9229c \ + --hash=sha256:a1b988b40f2fd9de5c820f3a701a43339d8dcf2cb2f1ca137e2c02671cc83ac1 \ + --hash=sha256:a47faedc9ea2e7a3b6569795c040aae5895a19dde0c728a48d3c5d7995fda385 \ + --hash=sha256:a8040f85dcb9830d8bbb033ae66d272614cec6faceee88d37a88a9bd1a7a704e \ + --hash=sha256:b33bd114fa5a83f03ec6b7b262ef9f5cac549d4126f1dc702078767b10c46ed9 \ + --hash=sha256:c08079b4934b0bf0a8847f42c197b1d12cba6495a3d43febd7e99ecd1cdc8d54 \ + --hash=sha256:c28848761a6520c5c6071d2904a18d339a796ebe6b800adc8b3f474c5ce3c3ad \ + --hash=sha256:cb400138e73969eb5e0535d1d06cae6a6f7a15f2cc74add320e2130b8179211a \ + --hash=sha256:cbb5780e2e740b6b4f2d208e90453591036ff80c02cc605fea1af8e6fc6b1bbe \ + --hash=sha256:ccf2ebd2de2d6661e2520dae293298a3803a98ebfc099275f113ce1f6c2a80f1 \ + --hash=sha256:d35740e3f45f60f3c37b1e6f2f4702c23867b9ce21c6410254c9c682237da68d \ + --hash=sha256:d99abcd61760ebb34bdff37e5a3ba333c5cc09feda8c1ad42547bea0416ada78 \ + --hash=sha256:ddda1aa22495d8acd9dfbafff2866438d12faec4d024ebc2e656784d96328ad0 \ + --hash=sha256:dffd29a2961f3263a16d73945b57cd44a8fd0b235740cb14056f0612329b345e \ + --hash=sha256:e4842e4872ae4ae0f5497bf60a0498fa778c192cc7a9e87877abd2814aca9475 \ + --hash=sha256:e8dbe3e00771bfe3d04feed8210fc6617006d06d9a2679b74605b9fed3e8362c \ + --hash=sha256:ee2e743e51cb964b4975de572aa8fb95b633f496f9fcb5e257893df3be854746 \ + --hash=sha256:eeb38ff04ab6e5756a2aef6ad8d94e89bb4a51ef96e20f45c44ba190fa0bcaad \ + --hash=sha256:f8261fa2a5f679abeb2a0a93ad056d765cdca1c47745eda3f2d87f874ff4b8c9 # via # googleapis-common-protos # grpc-google-iam-v1 -idna==3.8 \ - --hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ - --hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 - # via requests +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 + # via + # requests + # yarl inflection==0.5.1 \ --hash=sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417 \ --hash=sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2 @@ -197,156 +413,351 @@ jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via -r requirements.in -libcst==1.4.0 \ - --hash=sha256:061d6855ef30efe38b8a292b7e5d57c8e820e71fc9ec9846678b60a934b53bbb \ - --hash=sha256:17d71001cb25e94cfe8c3d997095741a8c4aa7a6d234c0f972bc42818c88dfaf \ - --hash=sha256:279b54568ea1f25add50ea4ba3d76d4f5835500c82f24d54daae4c5095b986aa \ - --hash=sha256:2d47de16d105e7dd5f4e01a428d9f4dc1e71efd74f79766daf54528ce37f23c3 \ - --hash=sha256:3399e6c95df89921511b44d8c5bf6a75bcbc2d51f1f6429763609ba005c10f6b \ - --hash=sha256:3401dae41fe24565387a65baee3887e31a44e3e58066b0250bc3f3ccf85b1b5a \ - --hash=sha256:3c6a8faab9da48c5b371557d0999b4ca51f4f2cbd37ee8c2c4df0ac01c781465 \ - --hash=sha256:449e0b16604f054fa7f27c3ffe86ea7ef6c409836fe68fe4e752a1894175db00 \ - --hash=sha256:48601e3e590e2d6a7ab8c019cf3937c70511a78d778ab3333764531253acdb33 \ - --hash=sha256:5da9d7dc83801aba3b8d911f82dc1a375db0d508318bad79d9fb245374afe068 \ - --hash=sha256:62e2682ee1567b6a89c91853865372bf34f178bfd237853d84df2b87b446e654 \ - --hash=sha256:7c54aa66c86d8ece9c93156a2cf5ca512b0dce40142fe9e072c86af2bf892411 \ - 
--hash=sha256:7ece51d935bc9bf60b528473d2e5cc67cbb88e2f8146297e40ee2c7d80be6f13 \ - --hash=sha256:81653dea1cdfa4c6520a7c5ffb95fa4d220cbd242e446c7a06d42d8636bfcbba \ - --hash=sha256:8e54c777b8d27339b70f304d16fc8bc8674ef1bd34ed05ea874bf4921eb5a313 \ - --hash=sha256:9d0cc3c5a2a51fa7e1d579a828c0a2e46b2170024fd8b1a0691c8a52f3abb2d9 \ - --hash=sha256:addc6d585141a7677591868886f6bda0577529401a59d210aa8112114340e129 \ - --hash=sha256:b8ecdba8934632b4dadacb666cd3816627a6ead831b806336972ccc4ba7ca0e9 \ - --hash=sha256:bb0abf627ee14903d05d0ad9b2c6865f1b21eb4081e2c7bea1033f85db2b8bae \ - --hash=sha256:cb4e42ea107a37bff7f9fdbee9532d39f9ea77b89caa5c5112b37057b12e0838 \ - --hash=sha256:d024f44059a853b4b852cfc04fec33e346659d851371e46fc8e7c19de24d3da9 \ - --hash=sha256:d1989fa12d3cd79118ebd29ebe2a6976d23d509b1a4226bc3d66fcb7cb50bd5d \ - --hash=sha256:e6227562fc5c9c1efd15dfe90b0971ae254461b8b6b23c1b617139b6003de1c1 \ - --hash=sha256:f42797309bb725f0f000510d5463175ccd7155395f09b5e7723971b0007a976d \ - --hash=sha256:f6abce0e66bba2babfadc20530fd3688f672d565674336595b4623cd800b91ef +libcst==1.5.1 \ + --hash=sha256:00f3d2f32ee081bad3394546b0b9ac5e31686d3b5cfe4892d716d2ba65f9ec08 \ + --hash=sha256:01e01c04f0641188160d3b99c6526436e93a3fbf9783dba970f9885a77ec9b38 \ + --hash=sha256:02b38fa4d9f13e79fe69e9b5407b9e173557bcfb5960f7866cf4145af9c7ae09 \ + --hash=sha256:06a9b4c9b76da4a7399e6f1f3a325196fb5febd3ea59fac1f68e2116f3517cd8 \ + --hash=sha256:15697ea9f1edbb9a263364d966c72abda07195d1c1a6838eb79af057f1040770 \ + --hash=sha256:1947790a4fd7d96bcc200a6ecaa528045fcb26a34a24030d5859c7983662289e \ + --hash=sha256:19e39cfef4316599ca20d1c821490aeb783b52e8a8543a824972a525322a85d0 \ + --hash=sha256:1cc7393aaac733e963f0ee00466d059db74a38e15fc7e6a46dddd128c5be8d08 \ + --hash=sha256:1ff21005c33b634957a98db438e882522febf1cacc62fa716f29e163a3f5871a \ + --hash=sha256:26c804fa8091747128579013df0b5f8e6b0c7904d9c4ee83841f136f53e18684 \ + --hash=sha256:2e397f5b6c0fc271acea44579f154b0f3ab36011050f6db75ab00cef47441946 \ + --hash=sha256:3334afe9e7270e175de01198f816b0dc78dda94d9d72152b61851c323e4e741e \ + --hash=sha256:40b75bf2d70fc0bc26b1fa73e61bdc46fef59f5c71aedf16128e7c33db8d5e40 \ + --hash=sha256:40fbbaa8b839bfbfa5b300623ca2b6b0768b58bbc31b341afbc99110c9bee232 \ + --hash=sha256:56c944acaa781b8e586df3019374f5cf117054d7fc98f85be1ba84fe810005dc \ + --hash=sha256:5987daff8389b0df60b5c20499ff4fb73fc03cb3ae1f6a746eefd204ed08df85 \ + --hash=sha256:666813950b8637af0c0e96b1ca46f5d5f183d2fe50bbac2186f5b283a99f3529 \ + --hash=sha256:697eabe9f5ffc40f76d6d02e693274e0a382826d0cf8183bd44e7407dfb0ab90 \ + --hash=sha256:6d9ec764aa781ef35ab96b693569ac3dced16df9feb40ee6c274d13e86a1472e \ + --hash=sha256:71cb294db84df9e410208009c732628e920111683c2f2b2e0c5b71b98464f365 \ + --hash=sha256:72132756f985a19ef64d702a821099d4afc3544974662772b44cbc55b7279727 \ + --hash=sha256:76a8ac7a84f9b6f678a668bff85b360e0a93fa8d7f25a74a206a28110734bb2a \ + --hash=sha256:89c808bdb5fa9ca02df41dd234cbb0e9de0d2e0c029c7063d5435a9f6781cc10 \ + --hash=sha256:940ec4c8db4c2d620a7268d6c83e64ff646e4afd74ae5183d0f0ef3b80e05be0 \ + --hash=sha256:99bbffd8596d192bc0e844a4cf3c4fc696979d4e20ab1c0774a01768a59b47ed \ + --hash=sha256:aa524bd012aaae1f485fd44490ef5abf708b14d2addc0f06b28de3e4585c4b9e \ + --hash=sha256:ab83633e61ee91df575a3838b1e73c371f19d4916bf1816554933235553d41ea \ + --hash=sha256:b58a49895d95ec1fd34fad041a142d98edf9b51fcaf632337c13befeb4d51c7c \ + --hash=sha256:b5a0d3c632aa2b21c5fa145e4e8dbf86f45c9b37a64c0b7221a5a45caf58915a \ + 
--hash=sha256:b7b58b36022ae77a5a00002854043ae95c03e92f6062ad08473eff326f32efa0 \ + --hash=sha256:bbaf5755be50fa9b35a3d553d1e62293fbb2ee5ce2c16c7e7ffeb2746af1ab88 \ + --hash=sha256:c615af2117320e9a218083c83ec61227d3547e38a0de80329376971765f27a9e \ + --hash=sha256:c7021e3904d8d088c369afc3fe17c279883e583415ef07edacadba76cfbecd27 \ + --hash=sha256:cedd4c8336e01c51913113fbf5566b8f61a86d90f3d5cc5b1cb5049575622c5f \ + --hash=sha256:db7711a762b0327b581be5a963908fecd74412bdda34db34553faa521563c22d \ + --hash=sha256:dc06b7c60d086ef1832aebfd31b64c3c8a645adf0c5638d6243e5838f6a9356e \ + --hash=sha256:ec6ee607cfe4cc4cc93e56e0188fdb9e50399d61a1262d58229752946f288f5e \ + --hash=sha256:eeb13d7c598fe9a798a1d22eae56ab3d3d599b38b83436039bd6ae229fc854d7 \ + --hash=sha256:f053a5deb6a214972dbe9fa26ecd8255edb903de084a3d7715bf9e9da8821c50 \ + --hash=sha256:f3ffb8135c09e41e8cf710b152c33e9b7f1d0d0b9f242bae0c502eb082fdb1fb \ + --hash=sha256:fbccb016b1ac6d892344300dcccc8a16887b71bb7f875ba56c0ed6c1a7ade8be # via -r requirements.in -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - 
--hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 +markupsafe==3.0.2 \ + --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ + --hash=sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30 \ + --hash=sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0 \ + --hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \ + --hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \ + --hash=sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13 \ + --hash=sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028 \ + --hash=sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca \ + --hash=sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557 \ + --hash=sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832 \ + --hash=sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0 \ + --hash=sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b \ + 
--hash=sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579 \ + --hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \ + --hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \ + --hash=sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff \ + --hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \ + --hash=sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22 \ + --hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \ + --hash=sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb \ + --hash=sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e \ + --hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \ + --hash=sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a \ + --hash=sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d \ + --hash=sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a \ + --hash=sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b \ + --hash=sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8 \ + --hash=sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225 \ + --hash=sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c \ + --hash=sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144 \ + --hash=sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f \ + --hash=sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87 \ + --hash=sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d \ + --hash=sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93 \ + --hash=sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf \ + --hash=sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158 \ + --hash=sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84 \ + --hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \ + --hash=sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48 \ + --hash=sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171 \ + --hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \ + --hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \ + --hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \ + --hash=sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d \ + --hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \ + --hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \ + --hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \ + --hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \ + --hash=sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29 \ + --hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \ + --hash=sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798 \ + --hash=sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c \ + --hash=sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8 \ + --hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \ + 
--hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \ + --hash=sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a \ + --hash=sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178 \ + --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 \ + --hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \ + --hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 \ + --hash=sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50 # via # -r requirements.in # jinja2 -mypy-extensions==1.0.0 \ - --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ - --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 - # via typing-inspect -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +multidict==6.1.0 \ + --hash=sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f \ + --hash=sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056 \ + --hash=sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761 \ + --hash=sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3 \ + --hash=sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b \ + --hash=sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6 \ + --hash=sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748 \ + --hash=sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966 \ + --hash=sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f \ + --hash=sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1 \ + --hash=sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6 \ + --hash=sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada \ + --hash=sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305 \ + --hash=sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2 \ + --hash=sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d \ + --hash=sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a \ + --hash=sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef \ + --hash=sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c \ + --hash=sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb \ + --hash=sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60 \ + --hash=sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6 \ + --hash=sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4 \ + --hash=sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478 \ + --hash=sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81 \ + --hash=sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7 \ + --hash=sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56 \ + --hash=sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3 \ + --hash=sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6 \ + --hash=sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30 \ + --hash=sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb \ + 
--hash=sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506 \ + --hash=sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0 \ + --hash=sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925 \ + --hash=sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c \ + --hash=sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6 \ + --hash=sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e \ + --hash=sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95 \ + --hash=sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2 \ + --hash=sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133 \ + --hash=sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2 \ + --hash=sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa \ + --hash=sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3 \ + --hash=sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3 \ + --hash=sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436 \ + --hash=sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657 \ + --hash=sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581 \ + --hash=sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492 \ + --hash=sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43 \ + --hash=sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2 \ + --hash=sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2 \ + --hash=sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926 \ + --hash=sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057 \ + --hash=sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc \ + --hash=sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80 \ + --hash=sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255 \ + --hash=sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1 \ + --hash=sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972 \ + --hash=sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53 \ + --hash=sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1 \ + --hash=sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423 \ + --hash=sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a \ + --hash=sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160 \ + --hash=sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c \ + --hash=sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd \ + --hash=sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa \ + --hash=sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5 \ + --hash=sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b \ + --hash=sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa \ + --hash=sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef \ + --hash=sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44 \ + --hash=sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4 \ + --hash=sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156 \ + 
--hash=sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753 \ + --hash=sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28 \ + --hash=sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d \ + --hash=sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a \ + --hash=sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304 \ + --hash=sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008 \ + --hash=sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429 \ + --hash=sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72 \ + --hash=sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399 \ + --hash=sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3 \ + --hash=sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392 \ + --hash=sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167 \ + --hash=sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c \ + --hash=sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774 \ + --hash=sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351 \ + --hash=sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76 \ + --hash=sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875 \ + --hash=sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd \ + --hash=sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28 \ + --hash=sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db + # via + # aiohttp + # yarl +packaging==24.2 \ + --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f # via pytest pluggy==1.5.0 \ --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 # via pytest -proto-plus==1.24.0 \ - --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ - --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 +propcache==0.2.1 \ + --hash=sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4 \ + --hash=sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4 \ + --hash=sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a \ + --hash=sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f \ + --hash=sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9 \ + --hash=sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d \ + --hash=sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e \ + --hash=sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6 \ + --hash=sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf \ + --hash=sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034 \ + --hash=sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d \ + --hash=sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16 \ + --hash=sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30 \ + --hash=sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba \ + --hash=sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95 \ + 
--hash=sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d \ + --hash=sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae \ + --hash=sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348 \ + --hash=sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2 \ + --hash=sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64 \ + --hash=sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce \ + --hash=sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54 \ + --hash=sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629 \ + --hash=sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54 \ + --hash=sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1 \ + --hash=sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b \ + --hash=sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf \ + --hash=sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b \ + --hash=sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587 \ + --hash=sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097 \ + --hash=sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea \ + --hash=sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24 \ + --hash=sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7 \ + --hash=sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541 \ + --hash=sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6 \ + --hash=sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634 \ + --hash=sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3 \ + --hash=sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d \ + --hash=sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034 \ + --hash=sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465 \ + --hash=sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2 \ + --hash=sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf \ + --hash=sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1 \ + --hash=sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04 \ + --hash=sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5 \ + --hash=sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583 \ + --hash=sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb \ + --hash=sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b \ + --hash=sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c \ + --hash=sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958 \ + --hash=sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc \ + --hash=sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4 \ + --hash=sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82 \ + --hash=sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e \ + --hash=sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce \ + --hash=sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9 \ + --hash=sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518 \ + 
--hash=sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536 \ + --hash=sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505 \ + --hash=sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052 \ + --hash=sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff \ + --hash=sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1 \ + --hash=sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f \ + --hash=sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681 \ + --hash=sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347 \ + --hash=sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af \ + --hash=sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246 \ + --hash=sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787 \ + --hash=sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0 \ + --hash=sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f \ + --hash=sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439 \ + --hash=sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3 \ + --hash=sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6 \ + --hash=sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca \ + --hash=sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec \ + --hash=sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d \ + --hash=sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3 \ + --hash=sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16 \ + --hash=sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717 \ + --hash=sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6 \ + --hash=sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd \ + --hash=sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212 + # via + # aiohttp + # yarl +proto-plus==1.25.0 \ + --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ + --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 # via # -r requirements.in # google-api-core -protobuf==5.28.0 \ - --hash=sha256:018db9056b9d75eb93d12a9d35120f97a84d9a919bcab11ed56ad2d399d6e8dd \ - --hash=sha256:510ed78cd0980f6d3218099e874714cdf0d8a95582e7b059b06cabad855ed0a0 \ - --hash=sha256:532627e8fdd825cf8767a2d2b94d77e874d5ddb0adefb04b237f7cc296748681 \ - --hash=sha256:6206afcb2d90181ae8722798dcb56dc76675ab67458ac24c0dd7d75d632ac9bd \ - --hash=sha256:66c3edeedb774a3508ae70d87b3a19786445fe9a068dd3585e0cefa8a77b83d0 \ - --hash=sha256:6d7cc9e60f976cf3e873acb9a40fed04afb5d224608ed5c1a105db4a3f09c5b6 \ - --hash=sha256:853db610214e77ee817ecf0514e0d1d052dff7f63a0c157aa6eabae98db8a8de \ - --hash=sha256:d001a73c8bc2bf5b5c1360d59dd7573744e163b3607fa92788b7f3d5fefbd9a5 \ - --hash=sha256:dde74af0fa774fa98892209992295adbfb91da3fa98c8f67a88afe8f5a349add \ - --hash=sha256:dde9fcaa24e7a9654f4baf2a55250b13a5ea701493d904c54069776b99a8216b \ - --hash=sha256:eef7a8a2f4318e2cb2dee8666d26e58eaf437c14788f3a2911d0c3da40405ae8 +protobuf==5.29.2 \ + --hash=sha256:13d6d617a2a9e0e82a88113d7191a1baa1e42c2cc6f5f1398d3b054c8e7e714a \ + --hash=sha256:2d2e674c58a06311c8e99e74be43e7f3a8d1e2b2fdf845eaa347fbd866f23355 \ + --hash=sha256:36000f97ea1e76e8398a3f02936aac2a5d2b111aae9920ec1b769fc4a222c4d9 \ + 
--hash=sha256:494229ecd8c9009dd71eda5fd57528395d1eacdf307dbece6c12ad0dd09e912e \ + --hash=sha256:842de6d9241134a973aab719ab42b008a18a90f9f07f06ba480df268f86432f9 \ + --hash=sha256:a0c53d78383c851bfa97eb42e3703aefdc96d2036a41482ffd55dc5f529466eb \ + --hash=sha256:b2cc8e8bb7c9326996f0e160137b0861f1a82162502658df2951209d0cb0309e \ + --hash=sha256:b6b0d416bbbb9d4fbf9d0561dbfc4e324fd522f61f7af0fe0f282ab67b22477e \ + --hash=sha256:c12ba8249f5624300cf51c3d0bfe5be71a60c63e4dcf51ffe9a68771d958c851 \ + --hash=sha256:e621a98c0201a7c8afe89d9646859859be97cb22b8bf1d8eacfd90d5bda2eb19 \ + --hash=sha256:fde4554c0e578a5a0bcc9a276339594848d1e89f9ea47b4427c80e5d72f90181 # via # -r requirements.in # google-api-core # googleapis-common-protos # grpc-google-iam-v1 # proto-plus -pyasn1==0.6.0 \ - --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ - --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 \ - --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ - --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c # via google-auth -pypandoc==1.13 \ - --hash=sha256:31652073c7960c2b03570bd1e94f602ca9bc3e70099df5ead4cea98ff5151c1e \ - --hash=sha256:4c7d71bf2f1ed122aac287113b5c4d537a33bbc3c1df5aed11a7d4a7ac074681 +pypandoc==1.14 \ + --hash=sha256:1315c7ad7fac7236dacf69a05b521ed2c3f1d0177f70e9b92bfffce6c023df22 \ + --hash=sha256:6b4c45f5f1b9fb5bb562079164806bdbbc3e837b5402bcf3f1139edc5730a197 # via -r requirements.in -pytest==8.3.2 \ - --hash=sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5 \ - --hash=sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce +pytest==8.3.4 \ + --hash=sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6 \ + --hash=sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761 # via pytest-asyncio -pytest-asyncio==0.24.0 \ - --hash=sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b \ - --hash=sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276 +pytest-asyncio==0.25.0 \ + --hash=sha256:8c0610303c9e0442a5db8604505fc0f545456ba1528824842b37b4a626cbf609 \ + --hash=sha256:db5432d18eac6b7e28b46dcd9b69921b55c3b1086e85febfe04e70b18d9e81b3 # via -r requirements.in -PyYAML==6.0.2 \ +pyyaml==6.0.2 \ --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ --hash=sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48 \ --hash=sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086 \ @@ -411,21 +822,129 @@ rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 # via google-auth -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +tomli==2.2.1 \ + --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ + 
--hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ + --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ + --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ + --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ + --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ + --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ + --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ + --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ + --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ + --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ + --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ + --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ + --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ + --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ + --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ + --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ + --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ + --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ + --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ + --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ + --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ + --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ + --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ + --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ + --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ + --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ + --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ + --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ + --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ + --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ + --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via pytest typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via - # libcst - # typing-inspect -typing-inspect==0.9.0 \ - --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ - --hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 - # via libcst -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 + # via multidict +urllib3==2.2.3 \ + --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ + --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via requests +yarl==1.18.3 \ + --hash=sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba \ + 
--hash=sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193 \ + --hash=sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318 \ + --hash=sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee \ + --hash=sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e \ + --hash=sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1 \ + --hash=sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a \ + --hash=sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186 \ + --hash=sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1 \ + --hash=sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50 \ + --hash=sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640 \ + --hash=sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb \ + --hash=sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8 \ + --hash=sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc \ + --hash=sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5 \ + --hash=sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58 \ + --hash=sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2 \ + --hash=sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393 \ + --hash=sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24 \ + --hash=sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b \ + --hash=sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910 \ + --hash=sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c \ + --hash=sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272 \ + --hash=sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed \ + --hash=sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1 \ + --hash=sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04 \ + --hash=sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d \ + --hash=sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5 \ + --hash=sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d \ + --hash=sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889 \ + --hash=sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae \ + --hash=sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b \ + --hash=sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c \ + --hash=sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576 \ + --hash=sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34 \ + --hash=sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477 \ + --hash=sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990 \ + --hash=sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2 \ + --hash=sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512 \ + --hash=sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069 \ + --hash=sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a \ + --hash=sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6 \ + --hash=sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0 \ + 
--hash=sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8 \ + --hash=sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb \ + --hash=sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa \ + --hash=sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8 \ + --hash=sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e \ + --hash=sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e \ + --hash=sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985 \ + --hash=sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8 \ + --hash=sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1 \ + --hash=sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5 \ + --hash=sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690 \ + --hash=sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10 \ + --hash=sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789 \ + --hash=sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b \ + --hash=sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca \ + --hash=sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e \ + --hash=sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5 \ + --hash=sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59 \ + --hash=sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9 \ + --hash=sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8 \ + --hash=sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db \ + --hash=sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde \ + --hash=sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7 \ + --hash=sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb \ + --hash=sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3 \ + --hash=sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6 \ + --hash=sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285 \ + --hash=sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb \ + --hash=sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8 \ + --hash=sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482 \ + --hash=sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd \ + --hash=sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75 \ + --hash=sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760 \ + --hash=sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782 \ + --hash=sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53 \ + --hash=sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2 \ + --hash=sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1 \ + --hash=sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719 \ + --hash=sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62 + # via aiohttp diff --git a/rules_python_gapic/py_gapic.bzl b/rules_python_gapic/py_gapic.bzl index cf5e225c65..5b11375d2e 100644 --- a/rules_python_gapic/py_gapic.bzl +++ b/rules_python_gapic/py_gapic.bzl @@ -94,6 +94,7 @@ def py_gapic_library( requirement("google-api-core"), requirement("googleapis-common-protos"), 
requirement("pytest-asyncio"), + requirement("aiohttp") ] py_library( diff --git a/setup.py b/setup.py index 5c794454d8..b2eb1cafd1 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.18.5" +version = "1.21.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the @@ -76,6 +76,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Software Development :: Code Generators", "Topic :: Software Development :: Libraries :: Python Modules", diff --git a/tests/fragments/test_reserved_field_name.proto b/tests/fragments/test_reserved_field_name.proto index 9fee7912c9..f11b3416ce 100644 --- a/tests/fragments/test_reserved_field_name.proto +++ b/tests/fragments/test_reserved_field_name.proto @@ -34,6 +34,10 @@ message MethodRequest { string any = 4; string license = 5; string type = 6; + // Refer to PEP8 https://peps.python.org/pep-0008/#function-and-method-arguments + string self = 7; + // Refer to PEP8 https://peps.python.org/pep-0008/#function-and-method-arguments + string cls = 8; } message MethodResponse { diff --git a/tests/fragments/test_reserved_method_names.proto b/tests/fragments/test_reserved_method_names.proto index d8f23494fe..ba89ef0f25 100644 --- a/tests/fragments/test_reserved_method_names.proto +++ b/tests/fragments/test_reserved_method_names.proto @@ -30,6 +30,13 @@ service MyService { }; }; + rpc Import(CreateImportRequest) returns (CreateImportResponse) { + option (google.api.http) = { + body: "*" + post: "/import/v1" + }; + }; + rpc GrpcChannel(GrpcChannelRequest) returns (GrpcChannelResponse) { option (google.api.http) = { body: "*" @@ -59,6 +66,14 @@ message CreateChannelResponse { string info = 1; } +message CreateImportRequest { + string info = 1; +} + +message CreateImportResponse { + string info = 1; +} + message GrpcChannelRequest { string grpc_channel = 1; string info = 2; diff --git a/tests/integration/goldens/asset/docs/index.rst b/tests/integration/goldens/asset/docs/index.rst index fee6608ede..df4eb53564 100755 --- a/tests/integration/goldens/asset/docs/index.rst +++ b/tests/integration/goldens/asset/docs/index.rst @@ -3,5 +3,5 @@ API Reference .. toctree:: :maxdepth: 2 - asset_v1/services - asset_v1/types + asset_v1/services_ + asset_v1/types_ diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 2bd35d94b7..334e5067ff 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -46,6 +47,13 @@ from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport from .client import AssetServiceClient +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class AssetServiceAsyncClient: """Asset service definition.""" @@ -247,12 +255,26 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.asset_v1.AssetServiceAsyncClient`.", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.asset.v1.AssetService", + "credentialsType": None, + } + ) + async def export_assets(self, request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Exports assets with time and resource types to a given Cloud Storage location/BigQuery table. For Cloud Storage location @@ -308,8 +330,10 @@ async def sample_export_assets(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -368,7 +392,7 @@ async def list_assets(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListAssetsAsyncPager: r"""Lists assets with time and resource types and returns paged results in response. @@ -418,8 +442,10 @@ async def sample_list_assets(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
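The widened `metadata` annotation above encodes standard gRPC behavior: custom metadata values are plain strings, except for keys ending in the `-bin` suffix, which carry binary payloads as `bytes`. A minimal caller-side sketch, assuming a hypothetical project ID and hypothetical custom header names:

```python
from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()
response = client.list_assets(
    request={"parent": "projects/my-project"},  # hypothetical project ID
    metadata=(
        ("x-custom-header", "value"),         # ordinary key: value must be str
        ("x-custom-trace-bin", b"\x00\x01"),  # "-bin" suffix: value must be bytes
    ),
)
```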
Returns: google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager: @@ -490,7 +516,7 @@ async def batch_get_assets_history(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time window. For IAM_POLICY content, this API outputs history when @@ -532,8 +558,10 @@ async def sample_batch_get_assets_history(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse: @@ -577,7 +605,7 @@ async def create_feed(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.Feed: r"""Creates a feed in a parent project/folder/organization to listen to its asset @@ -634,8 +662,10 @@ async def sample_create_feed(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.Feed: @@ -699,7 +729,7 @@ async def get_feed(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.Feed: r"""Gets details about an asset feed. @@ -744,8 +774,10 @@ async def sample_get_feed(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.types.Feed: @@ -809,7 +841,7 @@ async def list_feeds(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent project/folder/organization. @@ -857,8 +889,10 @@ async def sample_list_feeds(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.ListFeedsResponse: @@ -914,7 +948,7 @@ async def update_feed(self, feed: Optional[asset_service.Feed] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.Feed: r"""Updates an asset feed configuration. @@ -963,8 +997,10 @@ async def sample_update_feed(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.Feed: @@ -1028,7 +1064,7 @@ async def delete_feed(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an asset feed. @@ -1070,8 +1106,10 @@ async def sample_delete_feed(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have @@ -1122,7 +1160,7 @@ async def search_all_resources(self, asset_types: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.SearchAllResourcesAsyncPager: r"""Searches all Google Cloud resources within the specified scope, such as a project, folder, or organization. The caller must be @@ -1273,8 +1311,10 @@ async def sample_search_all_resources(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesAsyncPager: @@ -1351,7 +1391,7 @@ async def search_all_iam_policies(self, query: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.SearchAllIamPoliciesAsyncPager: r"""Searches all IAM policies within the specified scope, such as a project, folder, or organization. The caller must be granted the @@ -1466,8 +1506,10 @@ async def sample_search_all_iam_policies(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesAsyncPager: @@ -1540,7 +1582,7 @@ async def analyze_iam_policy(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have what accesses on which resources. @@ -1581,8 +1623,10 @@ async def sample_analyze_iam_policy(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.types.AnalyzeIamPolicyResponse: @@ -1627,7 +1671,7 @@ async def analyze_iam_policy_longrunning(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Analyzes IAM policies asynchronously to answer which identities have what accesses on which resources, and writes the analysis @@ -1686,8 +1730,10 @@ async def sample_analyze_iam_policy_longrunning(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1742,7 +1788,7 @@ async def analyze_move(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.AnalyzeMoveResponse: r"""Analyze moving a resource to a specified destination without kicking off the actual move. The analysis is @@ -1786,8 +1832,10 @@ async def sample_analyze_move(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.AnalyzeMoveResponse: @@ -1832,7 +1880,7 @@ async def query_assets(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.QueryAssetsResponse: r"""Issue a job that queries assets using a SQL statement compatible with `BigQuery Standard @@ -1884,8 +1932,10 @@ async def sample_query_assets(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.types.QueryAssetsResponse: @@ -1931,7 +1981,7 @@ async def create_saved_query(self, saved_query_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.SavedQuery: r"""Creates a saved query in a parent project/folder/organization. @@ -2003,8 +2053,10 @@ async def sample_create_saved_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.SavedQuery: @@ -2066,7 +2118,7 @@ async def get_saved_query(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.SavedQuery: r"""Gets details about a saved query. @@ -2113,8 +2165,10 @@ async def sample_get_saved_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.SavedQuery: @@ -2172,7 +2226,7 @@ async def list_saved_queries(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSavedQueriesAsyncPager: r"""Lists all saved queries in a parent project/folder/organization. @@ -2222,8 +2276,10 @@ async def sample_list_saved_queries(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesAsyncPager: @@ -2296,7 +2352,7 @@ async def update_saved_query(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.SavedQuery: r"""Updates a saved query. @@ -2351,8 +2407,10 @@ async def sample_update_saved_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.SavedQuery: @@ -2412,7 +2470,7 @@ async def delete_saved_query(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a saved query. @@ -2456,8 +2514,10 @@ async def sample_delete_saved_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -2505,7 +2565,7 @@ async def batch_get_effective_iam_policies(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Gets effective IAM policies for a batch of resources. @@ -2543,8 +2603,10 @@ async def sample_batch_get_effective_iam_policies(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse: @@ -2592,7 +2654,7 @@ async def analyze_org_policies(self, filter: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.AnalyzeOrgPoliciesAsyncPager: r"""Analyzes organization policies under a scope. @@ -2665,8 +2727,10 @@ async def sample_analyze_org_policies(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesAsyncPager: @@ -2744,7 +2808,7 @@ async def analyze_org_policy_governed_containers(self, filter: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: r"""Analyzes organization policies governed containers (projects, folders or organization) under a scope. @@ -2817,8 +2881,10 @@ async def sample_analyze_org_policy_governed_containers(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: @@ -2896,7 +2962,7 @@ async def analyze_org_policy_governed_assets(self, filter: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: r"""Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. This RPC supports custom @@ -2998,8 +3064,10 @@ async def sample_analyze_org_policy_governed_assets(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
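The same `-bin` convention applies across the async surface retyped above; a sketch with the async client, assuming default credentials and a hypothetical feed name:

```python
import asyncio
from google.cloud import asset_v1

async def main() -> None:
    client = asset_v1.AssetServiceAsyncClient()
    # Metadata typing is identical on the async client: str values normally,
    # bytes for keys carrying the "-bin" suffix.
    await client.delete_feed(
        name="projects/my-project/feeds/my-feed",  # hypothetical feed name
        metadata=(("x-debug-id-bin", b"\x01"),),
    )

asyncio.run(main())
```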
Returns: google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: @@ -3075,7 +3143,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -3086,8 +3154,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -3100,11 +3170,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 95dd42b709..337ffecbbf 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -16,6 +16,7 @@ from collections import OrderedDict from http import HTTPStatus import json +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast @@ -38,6 +39,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.asset_v1.services.asset_service import pagers @@ -462,33 +471,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -498,9 +480,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - AssetServiceClient._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True def _add_cred_info_for_auth_errors( self, @@ -615,6 +597,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -667,12 +653,27 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.asset_v1.AssetServiceClient`.", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.asset.v1.AssetService", + "credentialsType": None, + } + ) + def export_assets(self, request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Exports assets with time and resource types to a given Cloud Storage location/BigQuery table. For Cloud Storage location @@ -728,8 +729,10 @@ def sample_export_assets(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -752,7 +755,7 @@ def sample_export_assets(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.export_assets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -788,7 +791,7 @@ def list_assets(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListAssetsPager: r"""Lists assets with time and resource types and returns paged results in response. @@ -838,8 +841,10 @@ def sample_list_assets(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager: @@ -871,7 +876,7 @@ def sample_list_assets(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_assets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -909,7 +914,7 @@ def batch_get_assets_history(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time window. For IAM_POLICY content, this API outputs history when @@ -951,8 +956,10 @@ def sample_batch_get_assets_history(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse: @@ -968,7 +975,7 @@ def sample_batch_get_assets_history(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.batch_get_assets_history] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -996,7 +1003,7 @@ def create_feed(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.Feed: r"""Creates a feed in a parent project/folder/organization to listen to its asset @@ -1053,8 +1060,10 @@ def sample_create_feed(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.Feed: @@ -1089,7 +1098,7 @@ def sample_create_feed(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_feed] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1117,7 +1126,7 @@ def get_feed(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.Feed: r"""Gets details about an asset feed. @@ -1162,8 +1171,10 @@ def sample_get_feed(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.Feed: @@ -1198,7 +1209,7 @@ def sample_get_feed(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_feed] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1226,7 +1237,7 @@ def list_feeds(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent project/folder/organization. @@ -1274,8 +1285,10 @@ def sample_list_feeds(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.ListFeedsResponse: @@ -1302,7 +1315,7 @@ def sample_list_feeds(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_feeds] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1330,7 +1343,7 @@ def update_feed(self, feed: Optional[asset_service.Feed] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.Feed: r"""Updates an asset feed configuration. @@ -1379,8 +1392,10 @@ def sample_update_feed(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.Feed: @@ -1415,7 +1430,7 @@ def sample_update_feed(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_feed] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1443,7 +1458,7 @@ def delete_feed(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an asset feed. @@ -1485,8 +1500,10 @@ def sample_delete_feed(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1509,7 +1526,7 @@ def sample_delete_feed(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_feed] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1536,7 +1553,7 @@ def search_all_resources(self, asset_types: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.SearchAllResourcesPager: r"""Searches all Google Cloud resources within the specified scope, such as a project, folder, or organization. The caller must be @@ -1687,8 +1704,10 @@ def sample_search_all_resources(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesPager: @@ -1724,7 +1743,7 @@ def sample_search_all_resources(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.search_all_resources] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1764,7 +1783,7 @@ def search_all_iam_policies(self, query: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.SearchAllIamPoliciesPager: r"""Searches all IAM policies within the specified scope, such as a project, folder, or organization. The caller must be granted the @@ -1879,8 +1898,10 @@ def sample_search_all_iam_policies(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesPager: @@ -1914,7 +1935,7 @@ def sample_search_all_iam_policies(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.search_all_iam_policies] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1952,7 +1973,7 @@ def analyze_iam_policy(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have what accesses on which resources. 
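Each of these methods prepends a routing header to the caller-supplied metadata through `gapic_v1.routing_header.to_grpc_metadata`. A rough sketch of what that helper yields, based on api_core's behavior (the project ID is illustrative):

    from google.api_core import gapic_v1

    # Produces the ("x-goog-request-params", "parent=projects%2Fmy-project")
    # pair that the backend uses for request routing; values are URL-encoded.
    routing_md = gapic_v1.routing_header.to_grpc_metadata(
        (("parent", "projects/my-project"),)
    )
    metadata = (("x-example-header", "value"),) + (routing_md,)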
@@ -1993,8 +2014,10 @@ def sample_analyze_iam_policy(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.AnalyzeIamPolicyResponse: @@ -2012,7 +2035,7 @@ def sample_analyze_iam_policy(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_iam_policy] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2039,7 +2062,7 @@ def analyze_iam_policy_longrunning(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Analyzes IAM policies asynchronously to answer which identities have what accesses on which resources, and writes the analysis @@ -2098,8 +2121,10 @@ def sample_analyze_iam_policy_longrunning(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2119,7 +2144,7 @@ def sample_analyze_iam_policy_longrunning(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_iam_policy_longrunning] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2154,7 +2179,7 @@ def analyze_move(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.AnalyzeMoveResponse: r"""Analyze moving a resource to a specified destination without kicking off the actual move. The analysis is @@ -2198,8 +2223,10 @@ def sample_analyze_move(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
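The `self._transport._wrapped_methods[...]` lookups seen throughout this file replace per-call wrapping: each RPC is wrapped once in `_prep_wrapped_messages` and the cached result is reused. Roughly what one wrapped entry amounts to, in a simplified sketch (the stand-in callable and the defaults shown are assumptions, not the generated code):

    from google.api_core import gapic_v1

    def fake_rpc(request, **kwargs):
        # Stand-in for a raw transport callable such as transport.analyze_move.
        return {"echo": request}

    # wrap_method layers default retry/timeout handling and client info onto
    # the raw callable; caching the wrapped result (as _wrapped_methods does)
    # avoids re-wrapping on every invocation.
    wrapped = gapic_v1.method.wrap_method(
        fake_rpc,
        default_timeout=None,
        client_info=gapic_v1.client_info.ClientInfo(),
    )
    print(wrapped({"destination": "folders/1"}, timeout=30.0, metadata=()))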
Returns: google.cloud.asset_v1.types.AnalyzeMoveResponse: @@ -2217,7 +2244,7 @@ def sample_analyze_move(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_move] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2244,7 +2271,7 @@ def query_assets(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.QueryAssetsResponse: r"""Issue a job that queries assets using a SQL statement compatible with `BigQuery Standard @@ -2296,8 +2323,10 @@ def sample_query_assets(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.QueryAssetsResponse: @@ -2313,7 +2342,7 @@ def sample_query_assets(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.query_assets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2343,7 +2372,7 @@ def create_saved_query(self, saved_query_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.SavedQuery: r"""Creates a saved query in a parent project/folder/organization. @@ -2415,8 +2444,10 @@ def sample_create_saved_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.SavedQuery: @@ -2449,7 +2480,7 @@ def sample_create_saved_query(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_saved_query] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2477,7 +2508,7 @@ def get_saved_query(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.SavedQuery: r"""Gets details about a saved query. @@ -2524,8 +2555,10 @@ def sample_get_saved_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.SavedQuery: @@ -2554,7 +2587,7 @@ def sample_get_saved_query(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_saved_query] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2582,7 +2615,7 @@ def list_saved_queries(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSavedQueriesPager: r"""Lists all saved queries in a parent project/folder/organization. @@ -2632,8 +2665,10 @@ def sample_list_saved_queries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesPager: @@ -2665,7 +2700,7 @@ def sample_list_saved_queries(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_saved_queries] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2705,7 +2740,7 @@ def update_saved_query(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.SavedQuery: r"""Updates a saved query. @@ -2760,8 +2795,10 @@ def sample_update_saved_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.SavedQuery: @@ -2792,7 +2829,7 @@ def sample_update_saved_query(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_saved_query] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2820,7 +2857,7 @@ def delete_saved_query(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a saved query. @@ -2864,8 +2901,10 @@ def sample_delete_saved_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -2888,7 +2927,7 @@ def sample_delete_saved_query(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_saved_query] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2912,7 +2951,7 @@ def batch_get_effective_iam_policies(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Gets effective IAM policies for a batch of resources. @@ -2950,8 +2989,10 @@ def sample_batch_get_effective_iam_policies(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse: @@ -2969,7 +3010,7 @@ def sample_batch_get_effective_iam_policies(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.batch_get_effective_iam_policies] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2999,7 +3040,7 @@ def analyze_org_policies(self, filter: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.AnalyzeOrgPoliciesPager: r"""Analyzes organization policies under a scope. @@ -3072,8 +3113,10 @@ def sample_analyze_org_policies(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesPager: @@ -3109,7 +3152,7 @@ def sample_analyze_org_policies(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_org_policies] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3150,7 +3193,7 @@ def analyze_org_policy_governed_containers(self, filter: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.AnalyzeOrgPolicyGovernedContainersPager: r"""Analyzes organization policies governed containers (projects, folders or organization) under a scope. @@ -3223,8 +3266,10 @@ def sample_analyze_org_policy_governed_containers(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersPager: @@ -3260,7 +3305,7 @@ def sample_analyze_org_policy_governed_containers(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_containers] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3301,7 +3346,7 @@ def analyze_org_policy_governed_assets(self, filter: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.AnalyzeOrgPolicyGovernedAssetsPager: r"""Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. This RPC supports custom @@ -3403,8 +3448,10 @@ def sample_analyze_org_policy_governed_assets(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsPager: @@ -3440,7 +3487,7 @@ def sample_analyze_org_policy_governed_assets(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_assets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3492,7 +3539,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -3503,8 +3550,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -3517,11 +3566,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. 
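client.py above wires `client_logging.initialize_logging()` into the constructor, so DEBUG-level request/response records flow through the standard `logging` module once a scope is opted in. One way to enable it, sketched under the assumption that the environment variable follows google-api-core's client_logging convention:

    import os

    # Must be set before the client is constructed, since
    # client_logging.initialize_logging() reads it at that point.
    os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google.cloud.asset_v1"

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()  # emits the "Created client ..." DEBUG record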
diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 1498b1ab97..5620c82f31 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -52,7 +52,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -65,8 +65,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.ListAssetsRequest(request) @@ -118,7 +120,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -131,8 +133,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.ListAssetsRequest(request) @@ -187,7 +191,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -200,8 +204,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.SearchAllResourcesRequest(request) @@ -253,7 +259,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. 
Args: @@ -266,8 +272,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.SearchAllResourcesRequest(request) @@ -322,7 +330,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -335,8 +343,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.SearchAllIamPoliciesRequest(request) @@ -388,7 +398,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -401,8 +411,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.SearchAllIamPoliciesRequest(request) @@ -457,7 +469,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -470,8 +482,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = asset_service.ListSavedQueriesRequest(request) @@ -523,7 +537,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -536,8 +550,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.ListSavedQueriesRequest(request) @@ -592,7 +608,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -605,8 +621,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.AnalyzeOrgPoliciesRequest(request) @@ -658,7 +676,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -671,8 +689,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.AnalyzeOrgPoliciesRequest(request) @@ -727,7 +747,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -740,8 +760,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) @@ -793,7 +815,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -806,8 +828,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) @@ -862,7 +886,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -875,8 +899,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) @@ -928,7 +954,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -941,8 +967,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 40735ff6b5..a1690041f7 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -306,6 +306,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 63790c16d1..519ecdec2b 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,14 +25,85 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.asset_v1.types import asset_service from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # 
pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class AssetServiceGrpcTransport(AssetServiceTransport): """gRPC backend transport for AssetService. @@ -181,7 +255,10 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -245,7 +322,7 @@ def operations_client(self) -> operations_v1.OperationsClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -282,7 +359,7 @@ def export_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'export_assets' not in self._stubs: - self._stubs['export_assets'] = self.grpc_channel.unary_unary( + self._stubs['export_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ExportAssets', request_serializer=asset_service.ExportAssetsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -309,7 +386,7 @@ def list_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self.grpc_channel.unary_unary( + self._stubs['list_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ListAssets', request_serializer=asset_service.ListAssetsRequest.serialize, response_deserializer=asset_service.ListAssetsResponse.deserialize, @@ -341,7 +418,7 @@ def batch_get_assets_history(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'batch_get_assets_history' not in self._stubs: - self._stubs['batch_get_assets_history'] = self.grpc_channel.unary_unary( + self._stubs['batch_get_assets_history'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, @@ -369,7 +446,7 @@ def create_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self.grpc_channel.unary_unary( + self._stubs['create_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/CreateFeed', request_serializer=asset_service.CreateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, @@ -395,7 +472,7 @@ def get_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self.grpc_channel.unary_unary( + self._stubs['get_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/GetFeed', request_serializer=asset_service.GetFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, @@ -422,7 +499,7 @@ def list_feeds(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self.grpc_channel.unary_unary( + self._stubs['list_feeds'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ListFeeds', request_serializer=asset_service.ListFeedsRequest.serialize, response_deserializer=asset_service.ListFeedsResponse.deserialize, @@ -448,7 +525,7 @@ def update_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self.grpc_channel.unary_unary( + self._stubs['update_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/UpdateFeed', request_serializer=asset_service.UpdateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, @@ -474,7 +551,7 @@ def delete_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self.grpc_channel.unary_unary( + self._stubs['delete_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/DeleteFeed', request_serializer=asset_service.DeleteFeedRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -503,7 +580,7 @@ def search_all_resources(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self.grpc_channel.unary_unary( + self._stubs['search_all_resources'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/SearchAllResources', request_serializer=asset_service.SearchAllResourcesRequest.serialize, response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, @@ -532,7 +609,7 @@ def search_all_iam_policies(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self.grpc_channel.unary_unary( + self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, @@ -559,7 +636,7 @@ def analyze_iam_policy(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_iam_policy' not in self._stubs: - self._stubs['analyze_iam_policy'] = self.grpc_channel.unary_unary( + self._stubs['analyze_iam_policy'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, @@ -596,7 +673,7 @@ def analyze_iam_policy_longrunning(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_iam_policy_longrunning' not in self._stubs: - self._stubs['analyze_iam_policy_longrunning'] = self.grpc_channel.unary_unary( + self._stubs['analyze_iam_policy_longrunning'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning', request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -628,7 +705,7 @@ def analyze_move(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_move' not in self._stubs: - self._stubs['analyze_move'] = self.grpc_channel.unary_unary( + self._stubs['analyze_move'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeMove', request_serializer=asset_service.AnalyzeMoveRequest.serialize, response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, @@ -669,7 +746,7 @@ def query_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'query_assets' not in self._stubs: - self._stubs['query_assets'] = self.grpc_channel.unary_unary( + self._stubs['query_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/QueryAssets', request_serializer=asset_service.QueryAssetsRequest.serialize, response_deserializer=asset_service.QueryAssetsResponse.deserialize, @@ -696,7 +773,7 @@ def create_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_saved_query' not in self._stubs: - self._stubs['create_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['create_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/CreateSavedQuery', request_serializer=asset_service.CreateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, @@ -722,7 +799,7 @@ def get_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'get_saved_query' not in self._stubs: - self._stubs['get_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['get_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/GetSavedQuery', request_serializer=asset_service.GetSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, @@ -749,7 +826,7 @@ def list_saved_queries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_saved_queries' not in self._stubs: - self._stubs['list_saved_queries'] = self.grpc_channel.unary_unary( + self._stubs['list_saved_queries'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ListSavedQueries', request_serializer=asset_service.ListSavedQueriesRequest.serialize, response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, @@ -775,7 +852,7 @@ def update_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_saved_query' not in self._stubs: - self._stubs['update_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['update_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', request_serializer=asset_service.UpdateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, @@ -801,7 +878,7 @@ def delete_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_saved_query' not in self._stubs: - self._stubs['delete_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['delete_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', request_serializer=asset_service.DeleteSavedQueryRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -828,7 +905,7 @@ def batch_get_effective_iam_policies(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'batch_get_effective_iam_policies' not in self._stubs: - self._stubs['batch_get_effective_iam_policies'] = self.grpc_channel.unary_unary( + self._stubs['batch_get_effective_iam_policies'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, @@ -854,7 +931,7 @@ def analyze_org_policies(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_org_policies' not in self._stubs: - self._stubs['analyze_org_policies'] = self.grpc_channel.unary_unary( + self._stubs['analyze_org_policies'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, @@ -882,7 +959,7 @@ def analyze_org_policy_governed_containers(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'analyze_org_policy_governed_containers' not in self._stubs: - self._stubs['analyze_org_policy_governed_containers'] = self.grpc_channel.unary_unary( + self._stubs['analyze_org_policy_governed_containers'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, @@ -927,7 +1004,7 @@ def analyze_org_policy_governed_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_org_policy_governed_assets' not in self._stubs: - self._stubs['analyze_org_policy_governed_assets'] = self.grpc_channel.unary_unary( + self._stubs['analyze_org_policy_governed_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, @@ -935,7 +1012,7 @@ def analyze_org_policy_governed_assets(self) -> Callable[ return self._stubs['analyze_org_policy_governed_assets'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def get_operation( @@ -948,7 +1025,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 45cafd05fb..54666f93ec 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
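The synchronous transport wires its interceptor in with grpc.intercept_channel, which returns a new wrapped channel and leaves the original untouched; that is why the transport keeps _grpc_channel and _logged_channel as separate attributes. A self-contained sketch of the same pattern, with a hypothetical no-op interceptor and a placeholder target:

import grpc

class _PassThroughInterceptor(grpc.UnaryUnaryClientInterceptor):
    # Forwards every unary-unary call unchanged; the generated
    # _LoggingClientInterceptor logs around this same hook.
    def intercept_unary_unary(self, continuation, client_call_details, request):
        return continuation(client_call_details, request)

channel = grpc.insecure_channel("localhost:50051")  # placeholder target
logged_channel = grpc.intercept_channel(channel, _PassThroughInterceptor())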
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -24,8 +27,11 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.asset_v1.types import asset_service @@ -34,6 +40,73 @@ from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO from .grpc import AssetServiceGrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class AssetServiceGrpcAsyncIOTransport(AssetServiceTransport): """gRPC AsyncIO backend transport for AssetService. @@ -227,8 +300,11 @@ def __init__(self, *, ], ) - # Wrap messages.
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -251,7 +327,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -288,7 +364,7 @@ def export_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'export_assets' not in self._stubs: - self._stubs['export_assets'] = self.grpc_channel.unary_unary( + self._stubs['export_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ExportAssets', request_serializer=asset_service.ExportAssetsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -315,7 +391,7 @@ def list_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self.grpc_channel.unary_unary( + self._stubs['list_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ListAssets', request_serializer=asset_service.ListAssetsRequest.serialize, response_deserializer=asset_service.ListAssetsResponse.deserialize, @@ -347,7 +423,7 @@ def batch_get_assets_history(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'batch_get_assets_history' not in self._stubs: - self._stubs['batch_get_assets_history'] = self.grpc_channel.unary_unary( + self._stubs['batch_get_assets_history'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, @@ -375,7 +451,7 @@ def create_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self.grpc_channel.unary_unary( + self._stubs['create_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/CreateFeed', request_serializer=asset_service.CreateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, @@ -401,7 +477,7 @@ def get_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self.grpc_channel.unary_unary( + self._stubs['get_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/GetFeed', request_serializer=asset_service.GetFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, @@ -428,7 +504,7 @@ def list_feeds(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self.grpc_channel.unary_unary( + self._stubs['list_feeds'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ListFeeds', request_serializer=asset_service.ListFeedsRequest.serialize, response_deserializer=asset_service.ListFeedsResponse.deserialize, @@ -454,7 +530,7 @@ def update_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self.grpc_channel.unary_unary( + self._stubs['update_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/UpdateFeed', request_serializer=asset_service.UpdateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, @@ -480,7 +556,7 @@ def delete_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self.grpc_channel.unary_unary( + self._stubs['delete_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/DeleteFeed', request_serializer=asset_service.DeleteFeedRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -509,7 +585,7 @@ def search_all_resources(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self.grpc_channel.unary_unary( + self._stubs['search_all_resources'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/SearchAllResources', request_serializer=asset_service.SearchAllResourcesRequest.serialize, response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, @@ -538,7 +614,7 @@ def search_all_iam_policies(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self.grpc_channel.unary_unary( + self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, @@ -565,7 +641,7 @@ def analyze_iam_policy(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_iam_policy' not in self._stubs: - self._stubs['analyze_iam_policy'] = self.grpc_channel.unary_unary( + self._stubs['analyze_iam_policy'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, @@ -602,7 +678,7 @@ def analyze_iam_policy_longrunning(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'analyze_iam_policy_longrunning' not in self._stubs: - self._stubs['analyze_iam_policy_longrunning'] = self.grpc_channel.unary_unary( + self._stubs['analyze_iam_policy_longrunning'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning', request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -634,7 +710,7 @@ def analyze_move(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_move' not in self._stubs: - self._stubs['analyze_move'] = self.grpc_channel.unary_unary( + self._stubs['analyze_move'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeMove', request_serializer=asset_service.AnalyzeMoveRequest.serialize, response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, @@ -675,7 +751,7 @@ def query_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'query_assets' not in self._stubs: - self._stubs['query_assets'] = self.grpc_channel.unary_unary( + self._stubs['query_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/QueryAssets', request_serializer=asset_service.QueryAssetsRequest.serialize, response_deserializer=asset_service.QueryAssetsResponse.deserialize, @@ -702,7 +778,7 @@ def create_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_saved_query' not in self._stubs: - self._stubs['create_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['create_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/CreateSavedQuery', request_serializer=asset_service.CreateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, @@ -728,7 +804,7 @@ def get_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_saved_query' not in self._stubs: - self._stubs['get_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['get_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/GetSavedQuery', request_serializer=asset_service.GetSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, @@ -755,7 +831,7 @@ def list_saved_queries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_saved_queries' not in self._stubs: - self._stubs['list_saved_queries'] = self.grpc_channel.unary_unary( + self._stubs['list_saved_queries'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ListSavedQueries', request_serializer=asset_service.ListSavedQueriesRequest.serialize, response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, @@ -781,7 +857,7 @@ def update_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_saved_query' not in self._stubs: - self._stubs['update_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['update_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', request_serializer=asset_service.UpdateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, @@ -807,7 +883,7 @@ def delete_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_saved_query' not in self._stubs: - self._stubs['delete_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['delete_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', request_serializer=asset_service.DeleteSavedQueryRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -834,7 +910,7 @@ def batch_get_effective_iam_policies(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'batch_get_effective_iam_policies' not in self._stubs: - self._stubs['batch_get_effective_iam_policies'] = self.grpc_channel.unary_unary( + self._stubs['batch_get_effective_iam_policies'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, @@ -860,7 +936,7 @@ def analyze_org_policies(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_org_policies' not in self._stubs: - self._stubs['analyze_org_policies'] = self.grpc_channel.unary_unary( + self._stubs['analyze_org_policies'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, @@ -888,7 +964,7 @@ def analyze_org_policy_governed_containers(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_org_policy_governed_containers' not in self._stubs: - self._stubs['analyze_org_policy_governed_containers'] = self.grpc_channel.unary_unary( + self._stubs['analyze_org_policy_governed_containers'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, @@ -933,7 +1009,7 @@ def analyze_org_policy_governed_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'analyze_org_policy_governed_assets' not in self._stubs: - self._stubs['analyze_org_policy_governed_assets'] = self.grpc_channel.unary_unary( + self._stubs['analyze_org_policy_governed_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, @@ -1127,6 +1203,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): @@ -1135,7 +1216,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -1152,7 +1233,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 4c86709c4f..f222a8a46d 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
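Note the asymmetry with the sync file: because grpc.intercept_channel has no asyncio counterpart, the asyncio transport above appends its interceptor to the channel's private _unary_unary_interceptors list and simply aliases _logged_channel to _grpc_channel. When constructing an aio channel directly, the public route is the interceptors argument; a sketch with a hypothetical no-op interceptor and placeholder target:

from grpc import aio

class _PassThroughAioInterceptor(aio.UnaryUnaryClientInterceptor):
    async def intercept_unary_unary(self, continuation, client_call_details, request):
        # Awaiting continuation() yields the in-flight call object; returning
        # it unchanged leaves the RPC untouched.
        return await continuation(client_call_details, request)

channel = aio.insecure_channel("localhost:50051", interceptors=[_PassThroughAioInterceptor()])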
# +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -45,11 +46,18 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -249,7 +257,7 @@ def post_update_saved_query(self, response): """ - def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeIamPolicyRequest, Sequence[Tuple[str, str]]]: + def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_iam_policy Override in a subclass to manipulate the request or metadata @@ -260,12 +268,32 @@ def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, def post_analyze_iam_policy(self, response: asset_service.AnalyzeIamPolicyResponse) -> asset_service.AnalyzeIamPolicyResponse: """Post-rpc interceptor for analyze_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_iam_policy` interceptor runs + before the `post_analyze_iam_policy_with_metadata` interceptor. """ return response - def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeIamPolicyLongrunningRequest, Sequence[Tuple[str, str]]]: + + def post_analyze_iam_policy_with_metadata(self, response: asset_service.AnalyzeIamPolicyResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_iam_policy_with_metadata` + interceptor in new development instead of the `post_analyze_iam_policy` interceptor. + When both interceptors are used, this `post_analyze_iam_policy_with_metadata` interceptor runs after the + `post_analyze_iam_policy` interceptor. The (possibly modified) response returned by + `post_analyze_iam_policy` will be passed to + `post_analyze_iam_policy_with_metadata`. 
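To adopt the new hook, subclass the generated REST interceptor and override the _with_metadata variant; the deprecated single-argument hook still works, and runs first. An illustrative sketch, assuming the generated class is named AssetServiceRestInterceptor per standard GAPIC naming (the subclass and its body are hypothetical):

from google.cloud.asset_v1.services.asset_service.transports.rest import AssetServiceRestInterceptor

class AuditingInterceptor(AssetServiceRestInterceptor):
    def post_analyze_iam_policy_with_metadata(self, response, metadata):
        # Inspect (or rewrite) the response together with its trailing
        # metadata before user code sees them; both must be returned.
        print(f"analyze_iam_policy metadata: {list(metadata)}")
        return response, metadata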
+ """ + return response, metadata + + def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyLongrunningRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_iam_policy_longrunning Override in a subclass to manipulate the request or metadata @@ -276,12 +304,32 @@ def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPo def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for analyze_iam_policy_longrunning - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_iam_policy_longrunning_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_iam_policy_longrunning` interceptor runs + before the `post_analyze_iam_policy_longrunning_with_metadata` interceptor. """ return response - def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, str]]]: + + def post_analyze_iam_policy_longrunning_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_iam_policy_longrunning + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_iam_policy_longrunning_with_metadata` + interceptor in new development instead of the `post_analyze_iam_policy_longrunning` interceptor. + When both interceptors are used, this `post_analyze_iam_policy_longrunning_with_metadata` interceptor runs after the + `post_analyze_iam_policy_longrunning` interceptor. The (possibly modified) response returned by + `post_analyze_iam_policy_longrunning` will be passed to + `post_analyze_iam_policy_longrunning_with_metadata`. + """ + return response, metadata + + def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_move Override in a subclass to manipulate the request or metadata @@ -292,12 +340,32 @@ def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asset_service.AnalyzeMoveResponse: """Post-rpc interceptor for analyze_move - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_move_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_move` interceptor runs + before the `post_analyze_move_with_metadata` interceptor. 
""" return response - def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, str]]]: + + def post_analyze_move_with_metadata(self, response: asset_service.AnalyzeMoveResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_move + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_move_with_metadata` + interceptor in new development instead of the `post_analyze_move` interceptor. + When both interceptors are used, this `post_analyze_move_with_metadata` interceptor runs after the + `post_analyze_move` interceptor. The (possibly modified) response returned by + `post_analyze_move` will be passed to + `post_analyze_move_with_metadata`. + """ + return response, metadata + + def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_org_policies Override in a subclass to manipulate the request or metadata @@ -308,12 +376,32 @@ def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequ def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesResponse) -> asset_service.AnalyzeOrgPoliciesResponse: """Post-rpc interceptor for analyze_org_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_org_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_org_policies` interceptor runs + before the `post_analyze_org_policies_with_metadata` interceptor. """ return response - def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, str]]]: + + def post_analyze_org_policies_with_metadata(self, response: asset_service.AnalyzeOrgPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_org_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_org_policies_with_metadata` + interceptor in new development instead of the `post_analyze_org_policies` interceptor. + When both interceptors are used, this `post_analyze_org_policies_with_metadata` interceptor runs after the + `post_analyze_org_policies` interceptor. The (possibly modified) response returned by + `post_analyze_org_policies` will be passed to + `post_analyze_org_policies_with_metadata`. 
+ """ + return response, metadata + + def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_org_policy_governed_assets Override in a subclass to manipulate the request or metadata @@ -324,12 +412,32 @@ def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeO def post_analyze_org_policy_governed_assets(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: """Post-rpc interceptor for analyze_org_policy_governed_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_org_policy_governed_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_org_policy_governed_assets` interceptor runs + before the `post_analyze_org_policy_governed_assets_with_metadata` interceptor. """ return response - def pre_analyze_org_policy_governed_containers(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, str]]]: + + def post_analyze_org_policy_governed_assets_with_metadata(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_org_policy_governed_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_org_policy_governed_assets_with_metadata` + interceptor in new development instead of the `post_analyze_org_policy_governed_assets` interceptor. + When both interceptors are used, this `post_analyze_org_policy_governed_assets_with_metadata` interceptor runs after the + `post_analyze_org_policy_governed_assets` interceptor. The (possibly modified) response returned by + `post_analyze_org_policy_governed_assets` will be passed to + `post_analyze_org_policy_governed_assets_with_metadata`. + """ + return response, metadata + + def pre_analyze_org_policy_governed_containers(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_org_policy_governed_containers Override in a subclass to manipulate the request or metadata @@ -340,12 +448,32 @@ def pre_analyze_org_policy_governed_containers(self, request: asset_service.Anal def post_analyze_org_policy_governed_containers(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: """Post-rpc interceptor for analyze_org_policy_governed_containers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_org_policy_governed_containers_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_org_policy_governed_containers` interceptor runs + before the `post_analyze_org_policy_governed_containers_with_metadata` interceptor. """ return response - def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.BatchGetAssetsHistoryRequest, Sequence[Tuple[str, str]]]: + + def post_analyze_org_policy_governed_containers_with_metadata(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_org_policy_governed_containers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_org_policy_governed_containers_with_metadata` + interceptor in new development instead of the `post_analyze_org_policy_governed_containers` interceptor. + When both interceptors are used, this `post_analyze_org_policy_governed_containers_with_metadata` interceptor runs after the + `post_analyze_org_policy_governed_containers` interceptor. The (possibly modified) response returned by + `post_analyze_org_policy_governed_containers` will be passed to + `post_analyze_org_policy_governed_containers_with_metadata`. + """ + return response, metadata + + def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for batch_get_assets_history Override in a subclass to manipulate the request or metadata @@ -356,12 +484,32 @@ def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHist def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHistoryResponse) -> asset_service.BatchGetAssetsHistoryResponse: """Post-rpc interceptor for batch_get_assets_history - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_get_assets_history_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_batch_get_assets_history` interceptor runs + before the `post_batch_get_assets_history_with_metadata` interceptor. """ return response - def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, str]]]: + + def post_batch_get_assets_history_with_metadata(self, response: asset_service.BatchGetAssetsHistoryResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_get_assets_history + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. 
+ + We recommend only using this `post_batch_get_assets_history_with_metadata` + interceptor in new development instead of the `post_batch_get_assets_history` interceptor. + When both interceptors are used, this `post_batch_get_assets_history_with_metadata` interceptor runs after the + `post_batch_get_assets_history` interceptor. The (possibly modified) response returned by + `post_batch_get_assets_history` will be passed to + `post_batch_get_assets_history_with_metadata`. + """ + return response, metadata + + def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for batch_get_effective_iam_policies Override in a subclass to manipulate the request or metadata @@ -372,12 +520,32 @@ def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEf def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse) -> asset_service.BatchGetEffectiveIamPoliciesResponse: """Post-rpc interceptor for batch_get_effective_iam_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_get_effective_iam_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_batch_get_effective_iam_policies` interceptor runs + before the `post_batch_get_effective_iam_policies_with_metadata` interceptor. """ return response - def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, str]]]: + + def post_batch_get_effective_iam_policies_with_metadata(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_get_effective_iam_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_batch_get_effective_iam_policies_with_metadata` + interceptor in new development instead of the `post_batch_get_effective_iam_policies` interceptor. + When both interceptors are used, this `post_batch_get_effective_iam_policies_with_metadata` interceptor runs after the + `post_batch_get_effective_iam_policies` interceptor. The (possibly modified) response returned by + `post_batch_get_effective_iam_policies` will be passed to + `post_batch_get_effective_iam_policies_with_metadata`. 
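The widened Sequence[Tuple[str, Union[str, bytes]]] annotation threaded through every hook in this file reflects that header values may arrive as bytes as well as str. A pre-hook that appends a request header under that contract might look like the following; the subclass, header name, and value are hypothetical, and the base class name is assumed as above:

from typing import Sequence, Tuple, Union

from google.cloud.asset_v1.services.asset_service.transports.rest import AssetServiceRestInterceptor

class HeaderInjectingInterceptor(AssetServiceRestInterceptor):
    def pre_batch_get_effective_iam_policies(self, request, metadata: Sequence[Tuple[str, Union[str, bytes]]]):
        # Copy before appending: the incoming sequence may be a tuple.
        return request, list(metadata) + [("x-debug-marker", "on")]  # hypothetical header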
+ """ + return response, metadata + + def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_feed Override in a subclass to manipulate the request or metadata @@ -388,12 +556,32 @@ def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Se def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for create_feed - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_feed_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_create_feed` interceptor runs + before the `post_create_feed_with_metadata` interceptor. """ return response - def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, str]]]: + + def post_create_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_feed + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_create_feed_with_metadata` + interceptor in new development instead of the `post_create_feed` interceptor. + When both interceptors are used, this `post_create_feed_with_metadata` interceptor runs after the + `post_create_feed` interceptor. The (possibly modified) response returned by + `post_create_feed` will be passed to + `post_create_feed_with_metadata`. + """ + return response, metadata + + def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_saved_query Override in a subclass to manipulate the request or metadata @@ -404,12 +592,32 @@ def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: """Post-rpc interceptor for create_saved_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_saved_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_create_saved_query` interceptor runs + before the `post_create_saved_query_with_metadata` interceptor. 
""" return response - def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, str]]]: + + def post_create_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_saved_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_create_saved_query_with_metadata` + interceptor in new development instead of the `post_create_saved_query` interceptor. + When both interceptors are used, this `post_create_saved_query_with_metadata` interceptor runs after the + `post_create_saved_query` interceptor. The (possibly modified) response returned by + `post_create_saved_query` will be passed to + `post_create_saved_query_with_metadata`. + """ + return response, metadata + + def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_feed Override in a subclass to manipulate the request or metadata @@ -417,7 +625,7 @@ def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Se """ return request, metadata - def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.DeleteSavedQueryRequest, Sequence[Tuple[str, str]]]: + def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_saved_query Override in a subclass to manipulate the request or metadata @@ -425,7 +633,7 @@ def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, """ return request, metadata - def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ExportAssetsRequest, Sequence[Tuple[str, str]]]: + def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ExportAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for export_assets Override in a subclass to manipulate the request or metadata @@ -436,12 +644,32 @@ def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata def post_export_assets(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for export_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_export_assets` interceptor runs + before the `post_export_assets_with_metadata` interceptor. 
""" return response - def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, str]]]: + + def post_export_assets_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_export_assets_with_metadata` + interceptor in new development instead of the `post_export_assets` interceptor. + When both interceptors are used, this `post_export_assets_with_metadata` interceptor runs after the + `post_export_assets` interceptor. The (possibly modified) response returned by + `post_export_assets` will be passed to + `post_export_assets_with_metadata`. + """ + return response, metadata + + def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_feed Override in a subclass to manipulate the request or metadata @@ -452,12 +680,32 @@ def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for get_feed - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_feed_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_get_feed` interceptor runs + before the `post_get_feed_with_metadata` interceptor. """ return response - def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, str]]]: + + def post_get_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_feed + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_get_feed_with_metadata` + interceptor in new development instead of the `post_get_feed` interceptor. + When both interceptors are used, this `post_get_feed_with_metadata` interceptor runs after the + `post_get_feed` interceptor. The (possibly modified) response returned by + `post_get_feed` will be passed to + `post_get_feed_with_metadata`. 
+ """ + return response, metadata + + def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_saved_query Override in a subclass to manipulate the request or metadata @@ -468,12 +716,32 @@ def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metad def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: """Post-rpc interceptor for get_saved_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_saved_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_get_saved_query` interceptor runs + before the `post_get_saved_query_with_metadata` interceptor. """ return response - def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListAssetsRequest, Sequence[Tuple[str, str]]]: + + def post_get_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_saved_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_get_saved_query_with_metadata` + interceptor in new development instead of the `post_get_saved_query` interceptor. + When both interceptors are used, this `post_get_saved_query_with_metadata` interceptor runs after the + `post_get_saved_query` interceptor. The (possibly modified) response returned by + `post_get_saved_query` will be passed to + `post_get_saved_query_with_metadata`. + """ + return response, metadata + + def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_assets Override in a subclass to manipulate the request or metadata @@ -484,12 +752,32 @@ def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Se def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_service.ListAssetsResponse: """Post-rpc interceptor for list_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_list_assets` interceptor runs + before the `post_list_assets_with_metadata` interceptor. 
""" return response - def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, str]]]: + + def post_list_assets_with_metadata(self, response: asset_service.ListAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_list_assets_with_metadata` + interceptor in new development instead of the `post_list_assets` interceptor. + When both interceptors are used, this `post_list_assets_with_metadata` interceptor runs after the + `post_list_assets` interceptor. The (possibly modified) response returned by + `post_list_assets` will be passed to + `post_list_assets_with_metadata`. + """ + return response, metadata + + def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_feeds Override in a subclass to manipulate the request or metadata @@ -500,12 +788,32 @@ def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequ def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_service.ListFeedsResponse: """Post-rpc interceptor for list_feeds - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_feeds_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_list_feeds` interceptor runs + before the `post_list_feeds_with_metadata` interceptor. """ return response - def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, str]]]: + + def post_list_feeds_with_metadata(self, response: asset_service.ListFeedsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_feeds + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_list_feeds_with_metadata` + interceptor in new development instead of the `post_list_feeds` interceptor. + When both interceptors are used, this `post_list_feeds_with_metadata` interceptor runs after the + `post_list_feeds` interceptor. The (possibly modified) response returned by + `post_list_feeds` will be passed to + `post_list_feeds_with_metadata`. 
+ """ + return response, metadata + + def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_saved_queries Override in a subclass to manipulate the request or metadata @@ -516,12 +824,32 @@ def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, def post_list_saved_queries(self, response: asset_service.ListSavedQueriesResponse) -> asset_service.ListSavedQueriesResponse: """Post-rpc interceptor for list_saved_queries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_saved_queries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_list_saved_queries` interceptor runs + before the `post_list_saved_queries_with_metadata` interceptor. """ return response - def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, str]]]: + + def post_list_saved_queries_with_metadata(self, response: asset_service.ListSavedQueriesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_saved_queries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_list_saved_queries_with_metadata` + interceptor in new development instead of the `post_list_saved_queries` interceptor. + When both interceptors are used, this `post_list_saved_queries_with_metadata` interceptor runs after the + `post_list_saved_queries` interceptor. The (possibly modified) response returned by + `post_list_saved_queries` will be passed to + `post_list_saved_queries_with_metadata`. + """ + return response, metadata + + def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for query_assets Override in a subclass to manipulate the request or metadata @@ -532,12 +860,32 @@ def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asset_service.QueryAssetsResponse: """Post-rpc interceptor for query_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_query_assets` interceptor runs + before the `post_query_assets_with_metadata` interceptor. 
""" return response - def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, str]]]: + + def post_query_assets_with_metadata(self, response: asset_service.QueryAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for query_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_query_assets_with_metadata` + interceptor in new development instead of the `post_query_assets` interceptor. + When both interceptors are used, this `post_query_assets_with_metadata` interceptor runs after the + `post_query_assets` interceptor. The (possibly modified) response returned by + `post_query_assets` will be passed to + `post_query_assets_with_metadata`. + """ + return response, metadata + + def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for search_all_iam_policies Override in a subclass to manipulate the request or metadata @@ -548,12 +896,32 @@ def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPolicie def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPoliciesResponse) -> asset_service.SearchAllIamPoliciesResponse: """Post-rpc interceptor for search_all_iam_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_all_iam_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_search_all_iam_policies` interceptor runs + before the `post_search_all_iam_policies_with_metadata` interceptor. """ return response - def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, str]]]: + + def post_search_all_iam_policies_with_metadata(self, response: asset_service.SearchAllIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_all_iam_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_search_all_iam_policies_with_metadata` + interceptor in new development instead of the `post_search_all_iam_policies` interceptor. + When both interceptors are used, this `post_search_all_iam_policies_with_metadata` interceptor runs after the + `post_search_all_iam_policies` interceptor. The (possibly modified) response returned by + `post_search_all_iam_policies` will be passed to + `post_search_all_iam_policies_with_metadata`. 
+ """ + return response, metadata + + def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for search_all_resources Override in a subclass to manipulate the request or metadata @@ -564,12 +932,32 @@ def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequ def post_search_all_resources(self, response: asset_service.SearchAllResourcesResponse) -> asset_service.SearchAllResourcesResponse: """Post-rpc interceptor for search_all_resources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_all_resources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_search_all_resources` interceptor runs + before the `post_search_all_resources_with_metadata` interceptor. """ return response - def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, str]]]: + + def post_search_all_resources_with_metadata(self, response: asset_service.SearchAllResourcesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_all_resources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_search_all_resources_with_metadata` + interceptor in new development instead of the `post_search_all_resources` interceptor. + When both interceptors are used, this `post_search_all_resources_with_metadata` interceptor runs after the + `post_search_all_resources` interceptor. The (possibly modified) response returned by + `post_search_all_resources` will be passed to + `post_search_all_resources_with_metadata`. + """ + return response, metadata + + def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_feed Override in a subclass to manipulate the request or metadata @@ -580,12 +968,32 @@ def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Se def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for update_feed - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_feed_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_update_feed` interceptor runs + before the `post_update_feed_with_metadata` interceptor. 
""" return response - def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, str]]]: + + def post_update_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_feed + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_update_feed_with_metadata` + interceptor in new development instead of the `post_update_feed` interceptor. + When both interceptors are used, this `post_update_feed_with_metadata` interceptor runs after the + `post_update_feed` interceptor. The (possibly modified) response returned by + `post_update_feed` will be passed to + `post_update_feed_with_metadata`. + """ + return response, metadata + + def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_saved_query Override in a subclass to manipulate the request or metadata @@ -596,15 +1004,34 @@ def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: """Post-rpc interceptor for update_saved_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_saved_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_update_saved_query` interceptor runs + before the `post_update_saved_query_with_metadata` interceptor. """ return response + def post_update_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_saved_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_update_saved_query_with_metadata` + interceptor in new development instead of the `post_update_saved_query` interceptor. + When both interceptors are used, this `post_update_saved_query_with_metadata` interceptor runs after the + `post_update_saved_query` interceptor. The (possibly modified) response returned by + `post_update_saved_query` will be passed to + `post_update_saved_query_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -774,7 +1201,7 @@ def __call__(self, request: asset_service.AnalyzeIamPolicyRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.AnalyzeIamPolicyResponse: r"""Call the analyze iam policy method over HTTP. @@ -785,8 +1212,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.AnalyzeIamPolicyResponse: @@ -796,12 +1225,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_http_options() + request, metadata = self._interceptor.pre_analyze_iam_policy(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicy", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._AnalyzeIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -815,7 +1268,29 @@ def __call__(self, pb_resp = asset_service.AnalyzeIamPolicyResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_iam_policy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.AnalyzeIamPolicyResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": 
dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _AnalyzeIamPolicyLongrunning(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning, AssetServiceRestStub): @@ -849,7 +1324,7 @@ def __call__(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the analyze iam policy longrunning method over HTTP. @@ -861,8 +1336,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -873,6 +1350,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_http_options() + request, metadata = self._interceptor.pre_analyze_iam_policy_longrunning(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_transcoded_request(http_options, request) @@ -881,6 +1359,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicyLongrunning", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeIamPolicyLongrunning", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._AnalyzeIamPolicyLongrunning._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -892,7 +1393,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_iam_policy_longrunning_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None 
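Each debug record in this diff attaches structured fields through `extra=` ("serviceName", "rpcName", "httpRequest"/"httpResponse"). A sketch of a formatter that surfaces them; the attribute names are taken from those `extra` dicts, everything else is illustrative.

import logging


class RpcFormatter(logging.Formatter):
    def format(self, record: logging.LogRecord) -> str:
        svc = getattr(record, "serviceName", "-")
        rpc = getattr(record, "rpcName", "-")
        return f"{svc}/{rpc}: {record.getMessage()}"


handler = logging.StreamHandler()
handler.setFormatter(RpcFormatter())
logging.getLogger("google.cloud.asset_v1").addHandler(handler)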
+ http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy_longrunning", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeIamPolicyLongrunning", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _AnalyzeMove(_BaseAssetServiceRestTransport._BaseAnalyzeMove, AssetServiceRestStub): @@ -925,7 +1448,7 @@ def __call__(self, request: asset_service.AnalyzeMoveRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.AnalyzeMoveResponse: r"""Call the analyze move method over HTTP. @@ -936,8 +1459,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.AnalyzeMoveResponse: @@ -947,12 +1472,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_http_options() + request, metadata = self._interceptor.pre_analyze_move(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeMove", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeMove", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._AnalyzeMove._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -966,7 +1515,29 @@ def __call__(self, pb_resp = asset_service.AnalyzeMoveResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_move(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_move_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.AnalyzeMoveResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + 
_LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_move", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeMove", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _AnalyzeOrgPolicies(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies, AssetServiceRestStub): @@ -999,7 +1570,7 @@ def __call__(self, request: asset_service.AnalyzeOrgPoliciesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.AnalyzeOrgPoliciesResponse: r"""Call the analyze org policies method over HTTP. @@ -1010,8 +1581,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.AnalyzeOrgPoliciesResponse: @@ -1021,12 +1594,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_http_options() + request, metadata = self._interceptor.pre_analyze_org_policies(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicies", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeOrgPolicies", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._AnalyzeOrgPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1040,7 +1637,29 @@ def __call__(self, pb_resp = asset_service.AnalyzeOrgPoliciesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_org_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_org_policies_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.AnalyzeOrgPoliciesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for 
google.cloud.asset_v1.AssetServiceClient.analyze_org_policies", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeOrgPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _AnalyzeOrgPolicyGovernedAssets(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets, AssetServiceRestStub): @@ -1073,7 +1692,7 @@ def __call__(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: r"""Call the analyze org policy governed assets method over HTTP. @@ -1085,8 +1704,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: @@ -1096,12 +1717,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_http_options() + request, metadata = self._interceptor.pre_analyze_org_policy_governed_assets(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedAssets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeOrgPolicyGovernedAssets", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1115,7 +1760,29 @@ def __call__(self, pb_resp = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_org_policy_governed_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_org_policy_governed_assets_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(response) + except: + 
response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_assets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeOrgPolicyGovernedAssets", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _AnalyzeOrgPolicyGovernedContainers(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers, AssetServiceRestStub): @@ -1148,7 +1815,7 @@ def __call__(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: r"""Call the analyze org policy governed containers method over HTTP. @@ -1160,8 +1827,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.AnalyzeOrgPolicyGovernedContainersResponse: @@ -1171,12 +1840,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_http_options() + request, metadata = self._interceptor.pre_analyze_org_policy_governed_containers(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedContainers", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeOrgPolicyGovernedContainers", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedContainers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1190,7 +1883,29 @@ def __call__(self, pb_resp = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_org_policy_governed_containers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_analyze_org_policy_governed_containers_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_containers", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeOrgPolicyGovernedContainers", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _BatchGetAssetsHistory(_BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory, AssetServiceRestStub): @@ -1223,7 +1938,7 @@ def __call__(self, request: asset_service.BatchGetAssetsHistoryRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Call the batch get assets history method over HTTP. @@ -1233,8 +1948,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
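At the client surface, per-call metadata carries the same widened type this diff introduces at the transport layer. A sketch; the project ID and header are illustrative, and Application Default Credentials are assumed.

from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()
response = client.batch_get_assets_history(
    request={"parent": "projects/my-project"},
    metadata=[("x-request-tag", "history-audit")],
)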
Returns: ~.asset_service.BatchGetAssetsHistoryResponse: @@ -1242,12 +1959,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_http_options() + request, metadata = self._interceptor.pre_batch_get_assets_history(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetAssetsHistory", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "BatchGetAssetsHistory", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._BatchGetAssetsHistory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1261,7 +2002,29 @@ def __call__(self, pb_resp = asset_service.BatchGetAssetsHistoryResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_assets_history(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_get_assets_history_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.BatchGetAssetsHistoryResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.batch_get_assets_history", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "BatchGetAssetsHistory", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _BatchGetEffectiveIamPolicies(_BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies, AssetServiceRestStub): @@ -1294,7 +2057,7 @@ def __call__(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Call the batch get effective iam policies method over HTTP. @@ -1306,8 +2069,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.BatchGetEffectiveIamPoliciesResponse: @@ -1317,12 +2082,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_http_options() + request, metadata = self._interceptor.pre_batch_get_effective_iam_policies(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetEffectiveIamPolicies", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "BatchGetEffectiveIamPolicies", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._BatchGetEffectiveIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1336,7 +2125,29 @@ def __call__(self, pb_resp = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_effective_iam_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_get_effective_iam_policies_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.batch_get_effective_iam_policies", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "BatchGetEffectiveIamPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _CreateFeed(_BaseAssetServiceRestTransport._BaseCreateFeed, AssetServiceRestStub): @@ -1370,7 +2181,7 @@ def __call__(self, request: asset_service.CreateFeedRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.Feed: r"""Call the create feed method over HTTP. @@ -1380,8 +2191,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.Feed: @@ -1397,6 +2210,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseCreateFeed._get_http_options() + request, metadata = self._interceptor.pre_create_feed(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseCreateFeed._get_transcoded_request(http_options, request) @@ -1405,6 +2219,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseCreateFeed._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateFeed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "CreateFeed", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._CreateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1418,7 +2255,29 @@ def __call__(self, pb_resp = asset_service.Feed.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_feed(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_feed_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.Feed.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.create_feed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "CreateFeed", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _CreateSavedQuery(_BaseAssetServiceRestTransport._BaseCreateSavedQuery, AssetServiceRestStub): @@ -1452,7 +2311,7 @@ def __call__(self, request: asset_service.CreateSavedQueryRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.SavedQuery: r"""Call the create saved query method over HTTP. @@ -1462,8 +2321,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.SavedQuery: @@ -1473,6 +2334,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_http_options() + request, metadata = self._interceptor.pre_create_saved_query(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_transcoded_request(http_options, request) @@ -1481,6 +2343,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateSavedQuery", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "CreateSavedQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._CreateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1494,7 +2379,29 @@ def __call__(self, pb_resp = asset_service.SavedQuery.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_saved_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_saved_query_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.SavedQuery.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.create_saved_query", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "CreateSavedQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteFeed(_BaseAssetServiceRestTransport._BaseDeleteFeed, AssetServiceRestStub): @@ -1527,7 +2434,7 @@ def __call__(self, request: asset_service.DeleteFeedRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ): r"""Call the delete feed method over HTTP. @@ -1537,17 +2444,43 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_http_options() + request, metadata = self._interceptor.pre_delete_feed(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteFeed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "DeleteFeed", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._DeleteFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1586,7 +2519,7 @@ def __call__(self, request: asset_service.DeleteSavedQueryRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ): r"""Call the delete saved query method over HTTP. @@ -1596,17 +2529,43 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
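DeleteFeed returns no body, so there is no post hook to inspect; the pre hook is the natural place for a client-side safety check. A hedged sketch with an illustrative guard condition:

from google.cloud.asset_v1.services.asset_service.transports.rest import (
    AssetServiceRestInterceptor,
)


class GuardedDeletes(AssetServiceRestInterceptor):
    def pre_delete_feed(self, request, metadata):
        # Fail fast before the HTTP request is ever built.
        if "/feeds/" not in request.name:
            raise ValueError(f"unexpected feed name: {request.name!r}")
        return request, metadata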
""" http_options = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_http_options() + request, metadata = self._interceptor.pre_delete_saved_query(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteSavedQuery", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "DeleteSavedQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._DeleteSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1646,7 +2605,7 @@ def __call__(self, request: asset_service.ExportAssetsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the export assets method over HTTP. @@ -1656,8 +2615,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -1668,6 +2629,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseExportAssets._get_http_options() + request, metadata = self._interceptor.pre_export_assets(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseExportAssets._get_transcoded_request(http_options, request) @@ -1676,6 +2638,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseExportAssets._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.ExportAssets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ExportAssets", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._ExportAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1687,7 +2672,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_assets_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.export_assets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ExportAssets", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetFeed(_BaseAssetServiceRestTransport._BaseGetFeed, AssetServiceRestStub): @@ -1720,7 +2727,7 @@ def __call__(self, request: asset_service.GetFeedRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.Feed: r"""Call the get feed method over HTTP. @@ -1730,8 +2737,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
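Each handler now routes the parsed response through a new `post_<rpc>_with_metadata` interceptor hook that also receives the response headers. A sketch of overriding one such hook, assuming the `AssetServiceRestInterceptor` base class and the transport's `interceptor` constructor argument from the same generated module (neither definition appears in this excerpt), with ambient credentials available:

```python
from google.cloud import asset_v1
from google.cloud.asset_v1.services.asset_service.transports.rest import (
    AssetServiceRestInterceptor,
    AssetServiceRestTransport,
)

class HeaderEchoInterceptor(AssetServiceRestInterceptor):
    def post_export_assets_with_metadata(self, response, metadata):
        # `metadata` is the [(key, str(value)), ...] list the handler
        # above builds from the HTTP response headers; return both.
        print(f"ExportAssets returned {len(metadata)} header pairs")
        return response, metadata

client = asset_v1.AssetServiceClient(
    transport=AssetServiceRestTransport(interceptor=HeaderEchoInterceptor()),
)
```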
Returns: ~.asset_service.Feed: @@ -1747,12 +2756,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseGetFeed._get_http_options() + request, metadata = self._interceptor.pre_get_feed(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseGetFeed._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseGetFeed._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetFeed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "GetFeed", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._GetFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1766,7 +2799,29 @@ def __call__(self, pb_resp = asset_service.Feed.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_feed(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_feed_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.Feed.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.get_feed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "GetFeed", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetSavedQuery(_BaseAssetServiceRestTransport._BaseGetSavedQuery, AssetServiceRestStub): @@ -1799,7 +2854,7 @@ def __call__(self, request: asset_service.GetSavedQueryRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.SavedQuery: r"""Call the get saved query method over HTTP. @@ -1809,8 +2864,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.asset_service.SavedQuery: @@ -1820,12 +2877,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_http_options() + request, metadata = self._interceptor.pre_get_saved_query(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetSavedQuery", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "GetSavedQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._GetSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1839,7 +2920,29 @@ def __call__(self, pb_resp = asset_service.SavedQuery.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_saved_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_saved_query_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.SavedQuery.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.get_saved_query", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "GetSavedQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListAssets(_BaseAssetServiceRestTransport._BaseListAssets, AssetServiceRestStub): @@ -1872,7 +2975,7 @@ def __call__(self, request: asset_service.ListAssetsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.ListAssetsResponse: r"""Call the list assets method over HTTP. @@ -1882,8 +2985,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
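All of the inserted logging blocks are double-gated: `CLIENT_LOGGING_SUPPORTED` (set by a guarded `google.api_core` import outside this excerpt) and the module logger being enabled for DEBUG. Assuming `_LOGGER` is the usual `logging.getLogger(__name__)`, standard-library configuration is enough to surface them; a sketch:

```python
import logging

# The transport module lives under the "google.cloud.asset_v1"
# namespace, so raising that logger to DEBUG (and installing a root
# handler) activates every gated block above.
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("google.cloud.asset_v1").setLevel(logging.DEBUG)
```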
Returns: ~.asset_service.ListAssetsResponse: @@ -1891,12 +2996,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseListAssets._get_http_options() + request, metadata = self._interceptor.pre_list_assets(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseListAssets._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseListAssets._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListAssets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ListAssets", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._ListAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1910,7 +3039,29 @@ def __call__(self, pb_resp = asset_service.ListAssetsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_assets_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.ListAssetsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.list_assets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ListAssets", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListFeeds(_BaseAssetServiceRestTransport._BaseListFeeds, AssetServiceRestStub): @@ -1943,7 +3094,7 @@ def __call__(self, request: asset_service.ListFeedsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.ListFeedsResponse: r"""Call the list feeds method over HTTP. @@ -1953,8 +3104,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
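At the transport layer `_ListAssets` returns a raw `ListAssetsResponse`; the client surface wraps it in the `ListAssetsPager` named in the snippet metadata later in this diff, so callers never touch `next_page_token` directly. A usage sketch with a hypothetical project:

```python
from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()

# The pager re-invokes the transport with each next_page_token until
# the service returns an empty token.
for asset in client.list_assets(parent="projects/my-project"):
    print(asset.name)
```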
Returns: ~.asset_service.ListFeedsResponse: @@ -1962,12 +3115,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseListFeeds._get_http_options() + request, metadata = self._interceptor.pre_list_feeds(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseListFeeds._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseListFeeds._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListFeeds", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ListFeeds", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._ListFeeds._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1981,7 +3158,29 @@ def __call__(self, pb_resp = asset_service.ListFeedsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_feeds(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_feeds_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.ListFeedsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.list_feeds", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ListFeeds", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListSavedQueries(_BaseAssetServiceRestTransport._BaseListSavedQueries, AssetServiceRestStub): @@ -2014,7 +3213,7 @@ def __call__(self, request: asset_service.ListSavedQueriesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.ListSavedQueriesResponse: r"""Call the list saved queries method over HTTP. @@ -2024,8 +3223,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
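Every handler deserializes with `json_format.Parse(..., ignore_unknown_fields=True)`, which keeps an older generated client compatible with a newer server that has added response fields. A self-contained sketch of that tolerance; the `futureField` key is invented for illustration:

```python
from google.protobuf import json_format
from google.cloud.asset_v1.types import asset_service

resp = asset_service.ListFeedsResponse()
# The unknown "futureField" key is silently dropped instead of raising
# a ParseError, exactly as in the handlers above.
json_format.Parse(
    '{"feeds": [{"name": "projects/p/feeds/f"}], "futureField": 1}',
    asset_service.ListFeedsResponse.pb(resp),
    ignore_unknown_fields=True,
)
print(resp.feeds[0].name)
```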
Returns: ~.asset_service.ListSavedQueriesResponse: @@ -2033,12 +3234,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_http_options() + request, metadata = self._interceptor.pre_list_saved_queries(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListSavedQueries", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ListSavedQueries", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._ListSavedQueries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2052,7 +3277,29 @@ def __call__(self, pb_resp = asset_service.ListSavedQueriesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_saved_queries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_saved_queries_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.ListSavedQueriesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.list_saved_queries", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ListSavedQueries", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _QueryAssets(_BaseAssetServiceRestTransport._BaseQueryAssets, AssetServiceRestStub): @@ -2086,7 +3333,7 @@ def __call__(self, request: asset_service.QueryAssetsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.QueryAssetsResponse: r"""Call the query assets method over HTTP. @@ -2096,8 +3343,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
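The `extra=` dict passed to `_LOGGER.debug` is merged into the `LogRecord`'s attributes by the standard logging machinery, so downstream handlers can read `rpcName`, `httpRequest`, and `httpResponse` as plain record fields. A sketch of a formatter that consumes them:

```python
import logging

class RpcFormatter(logging.Formatter):
    def format(self, record):
        base = super().format(record)
        # Fields supplied via extra= land directly on the LogRecord.
        rpc = getattr(record, "rpcName", None)
        http = getattr(record, "httpResponse", None) or getattr(record, "httpRequest", None)
        return f"{base} [rpc={rpc} http={http}]" if rpc else base

handler = logging.StreamHandler()
handler.setFormatter(RpcFormatter("%(name)s: %(message)s"))
logging.getLogger("google.cloud.asset_v1").addHandler(handler)
```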
Returns: ~.asset_service.QueryAssetsResponse: @@ -2105,6 +3354,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseQueryAssets._get_http_options() + request, metadata = self._interceptor.pre_query_assets(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseQueryAssets._get_transcoded_request(http_options, request) @@ -2113,6 +3363,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseQueryAssets._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.QueryAssets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "QueryAssets", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._QueryAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2126,7 +3399,29 @@ def __call__(self, pb_resp = asset_service.QueryAssetsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_query_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_assets_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.QueryAssetsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.query_assets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "QueryAssets", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _SearchAllIamPolicies(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies, AssetServiceRestStub): @@ -2159,7 +3454,7 @@ def __call__(self, request: asset_service.SearchAllIamPoliciesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.SearchAllIamPoliciesResponse: r"""Call the search all iam policies method over HTTP. @@ -2169,8 +3464,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.asset_service.SearchAllIamPoliciesResponse: @@ -2178,12 +3475,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_http_options() + request, metadata = self._interceptor.pre_search_all_iam_policies(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllIamPolicies", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "SearchAllIamPolicies", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._SearchAllIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2197,7 +3518,29 @@ def __call__(self, pb_resp = asset_service.SearchAllIamPoliciesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_all_iam_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_all_iam_policies_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.SearchAllIamPoliciesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.search_all_iam_policies", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "SearchAllIamPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _SearchAllResources(_BaseAssetServiceRestTransport._BaseSearchAllResources, AssetServiceRestStub): @@ -2230,7 +3573,7 @@ def __call__(self, request: asset_service.SearchAllResourcesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.SearchAllResourcesResponse: r"""Call the search all resources method over HTTP. @@ -2240,8 +3583,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
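The `retry` and `timeout` parameters pass through `__call__` unchanged alongside the metadata sequence. A sketch of supplying both from the client surface, assuming a hypothetical scope; `Retry` here is the standard `google.api_core.retry.Retry`:

```python
from google.api_core import retry as retries
from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()

# Each attempt is capped at 30s; retries back off from 1s toward 10s
# until the overall 120s retry budget is exhausted.
pager = client.search_all_resources(
    scope="projects/my-project",
    retry=retries.Retry(initial=1.0, maximum=10.0, timeout=120.0),
    timeout=30.0,
)
for resource in pager:
    print(resource.display_name)
```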
Returns: ~.asset_service.SearchAllResourcesResponse: @@ -2249,12 +3594,36 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_http_options() + request, metadata = self._interceptor.pre_search_all_resources(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllResources", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "SearchAllResources", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._SearchAllResources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2268,7 +3637,29 @@ def __call__(self, pb_resp = asset_service.SearchAllResourcesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_all_resources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_all_resources_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.SearchAllResourcesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.search_all_resources", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "SearchAllResources", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateFeed(_BaseAssetServiceRestTransport._BaseUpdateFeed, AssetServiceRestStub): @@ -2302,7 +3693,7 @@ def __call__(self, request: asset_service.UpdateFeedRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.Feed: r"""Call the update feed method over HTTP. @@ -2312,8 +3703,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.asset_service.Feed: @@ -2329,6 +3722,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_http_options() + request, metadata = self._interceptor.pre_update_feed(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_transcoded_request(http_options, request) @@ -2337,6 +3731,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateFeed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "UpdateFeed", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._UpdateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2350,7 +3767,29 @@ def __call__(self, pb_resp = asset_service.Feed.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_feed(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_feed_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.Feed.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.update_feed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "UpdateFeed", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateSavedQuery(_BaseAssetServiceRestTransport._BaseUpdateSavedQuery, AssetServiceRestStub): @@ -2384,7 +3823,7 @@ def __call__(self, request: asset_service.UpdateSavedQueryRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.SavedQuery: r"""Call the update saved query method over HTTP. @@ -2394,8 +3833,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
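For mutations like `UpdateFeed`, the transcoding step above splits the request between URL path, query string, and JSON body according to the method's HTTP rule (which is defined outside this excerpt). A request-construction sketch with hypothetical names:

```python
from google.cloud import asset_v1
from google.protobuf import field_mask_pb2

client = asset_v1.AssetServiceClient()

updated = client.update_feed(
    request=asset_v1.UpdateFeedRequest(
        feed=asset_v1.Feed(
            name="projects/my-project/feeds/my-feed",
            content_type=asset_v1.ContentType.RESOURCE,
        ),
        # Only fields named in the mask are written server-side.
        update_mask=field_mask_pb2.FieldMask(paths=["content_type"]),
    )
)
```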
Returns: ~.asset_service.SavedQuery: @@ -2405,6 +3846,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_http_options() + request, metadata = self._interceptor.pre_update_saved_query(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_transcoded_request(http_options, request) @@ -2413,6 +3855,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateSavedQuery", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "UpdateSavedQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._UpdateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2426,7 +3891,29 @@ def __call__(self, pb_resp = asset_service.SavedQuery.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_saved_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_saved_query_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.SavedQuery.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.update_saved_query", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "UpdateSavedQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2618,6 +4105,9 @@ def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(_BaseAssetServiceRestTransport._BaseGetOperation, AssetServiceRestStub): + def __hash__(self): + return hash("AssetServiceRestTransport.GetOperation") + @staticmethod def _get_response( host, @@ -2644,7 +4134,7 @@ def __call__(self, request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -2655,20 +4145,46 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. """ http_options = _BaseAssetServiceRestTransport._BaseGetOperation._get_http_options() + request, metadata = self._interceptor.pre_get_operation(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetOperation", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = AssetServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2677,9 +4193,29 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py index 116512120a..7c4d227f99 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py @@ -19,7 +19,6 @@ from google.protobuf import json_format from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from google.auth import credentials as ga_credentials # type: ignore import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -45,7 +44,7 @@ class _BaseAssetServiceRestTransport(AssetServiceTransport): def __init__(self, *, host: str = 'cloudasset.googleapis.com', - credentials: 
Optional[ga_credentials.Credentials] = None, + credentials: Optional[Any] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, url_scheme: str = 'https', @@ -55,7 +54,7 @@ def __init__(self, *, Args: host (Optional[str]): The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The + credentials (Optional[Any]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the @@ -987,6 +986,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): diff --git a/tests/integration/goldens/asset/noxfile.py b/tests/integration/goldens/asset/noxfile.py index 3e4f7c4fe1..2ca01db629 100755 --- a/tests/integration/goldens/asset/noxfile.py +++ b/tests/integration/goldens/asset/noxfile.py @@ -29,7 +29,8 @@ "3.9", "3.10", "3.11", - "3.12" + "3.12", + "3.13", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -39,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" +DEFAULT_PYTHON_VERSION = "3.13" nox.sessions = [ "unit", @@ -61,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -94,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -129,7 +130,8 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - "grpcio", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", "grpcio-status", "protobuf", "proto-plus", diff --git a/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 5a90dfa88b..211efb19c2 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -43,7 +43,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -119,7 +119,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -196,7 +196,7 @@ }, { "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.AnalyzeIamPolicyResponse", @@ -272,7 +272,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.AnalyzeIamPolicyResponse", @@ -349,7 +349,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.AnalyzeMoveResponse", @@ -425,7 +425,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.AnalyzeMoveResponse", @@ -514,7 +514,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesAsyncPager", @@ -602,7 +602,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesPager", @@ -691,7 +691,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager", @@ -779,7 +779,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsPager", @@ -868,7 +868,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager", @@ -956,7 +956,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersPager", @@ -1033,7 +1033,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", @@ -1109,7 +1109,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", @@ -1186,7 +1186,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse", @@ -1262,7 +1262,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse", @@ -1343,7 +1343,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.Feed", @@ -1423,7 +1423,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.Feed", @@ -1512,7 +1512,7 @@ }, { "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.SavedQuery", @@ -1600,7 +1600,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.SavedQuery", @@ -1681,7 +1681,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_feed" @@ -1758,7 +1758,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_feed" @@ -1836,7 +1836,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_saved_query" @@ -1913,7 +1913,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_saved_query" @@ -1987,7 +1987,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -2063,7 +2063,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -2144,7 +2144,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.Feed", @@ -2224,7 +2224,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.Feed", @@ -2305,7 +2305,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.SavedQuery", @@ -2385,7 +2385,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.SavedQuery", @@ -2466,7 +2466,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager", @@ -2546,7 +2546,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager", @@ -2627,7 +2627,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", @@ -2707,7 +2707,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", @@ -2788,7 +2788,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesAsyncPager", @@ -2868,7 +2868,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesPager", 
@@ -2945,7 +2945,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.QueryAssetsResponse", @@ -3021,7 +3021,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.QueryAssetsResponse", @@ -3106,7 +3106,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesAsyncPager", @@ -3190,7 +3190,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesPager", @@ -3279,7 +3279,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesAsyncPager", @@ -3367,7 +3367,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesPager", @@ -3448,7 +3448,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.Feed", @@ -3528,7 +3528,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.Feed", @@ -3613,7 +3613,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.SavedQuery", @@ -3697,7 +3697,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.SavedQuery", diff --git a/tests/integration/goldens/asset/setup.py b/tests/integration/goldens/asset/setup.py index 931351be5f..db3e65c6ea 100755 --- a/tests/integration/goldens/asset/setup.py +++ b/tests/integration/goldens/asset/setup.py @@ -44,11 +44,14 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "google-cloud-access-context-manager >= 0.1.2, <1.0.0dev", "google-cloud-os-config >= 1.0.0, <2.0.0dev", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] +extras = { +} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -84,6 +87,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -91,6 +95,7 @@ packages=packages, python_requires=">=3.7", install_requires=dependencies, + extras_require=extras, include_package_data=True, zip_safe=False, ) diff --git a/tests/integration/goldens/asset/testing/constraints-3.13.txt 
b/tests/integration/goldens/asset/testing/constraints-3.13.txt new file mode 100755 index 0000000000..70744e5897 --- /dev/null +++ b/tests/integration/goldens/asset/testing/constraints-3.13.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-cloud-access-context-manager +google-cloud-os-config +grpc-google-iam-v1 diff --git a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 34fcf042a0..53393897d0 100755 --- a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -23,7 +23,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -78,6 +78,13 @@ } CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -194,6 +201,7 @@ def test__get_universe_domain(): AssetServiceClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." + @pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ (401, CRED_INFO_JSON, True), (403, CRED_INFO_JSON, True), @@ -860,25 +868,6 @@ def test_export_assets(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_export_assets_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.export_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ExportAssetsRequest() - - def test_export_assets_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -943,29 +932,6 @@ def test_export_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_export_assets_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - # Designate an appropriate return value for the call. 
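Editor's note: the packaging changes above wire up Python 3.13 support: `setup.py` gains a PEP 508 environment marker that raises the `proto-plus` floor to 1.25.0 on 3.13+, an (empty for now) `extras_require` mapping, a 3.13 trove classifier, and a new `testing/constraints-3.13.txt` listing the dependencies unit tests install against. The test module also gains a `mock_async_gen` helper for faking streamed response bodies; with the default `chunk_size=1` it yields the payload one encoded byte at a time (note the loop advances one index per iteration, so larger `chunk_size` values would produce overlapping windows). A self-contained sketch of how the helper behaves:

```python
import asyncio

# Copied from the diff above: yields UTF-8 encoded chunks of `data`.
async def mock_async_gen(data, chunk_size=1):
    for i in range(0, len(data)):  # pragma: NO COVER
        chunk = data[i: i + chunk_size]
        yield chunk.encode("utf-8")

async def main():
    # With the default chunk size, "abc" streams back byte by byte.
    chunks = [chunk async for chunk in mock_async_gen("abc")]
    assert chunks == [b"a", b"b", b"c"]

asyncio.run(main())
```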
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.export_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ExportAssetsRequest() - - @pytest.mark.asyncio async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1040,7 +1006,6 @@ async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type async def test_export_assets_async_from_dict(): await test_export_assets_async(request_type=dict) - def test_export_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1139,25 +1104,6 @@ def test_list_assets(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_assets_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListAssetsRequest() - - def test_list_assets_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1219,29 +1165,6 @@ def test_list_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_assets_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - )) - await client.list_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListAssetsRequest() - - @pytest.mark.asyncio async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1312,7 +1235,6 @@ async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=a async def test_list_assets_async_from_dict(): await test_list_assets_async(request_type=dict) - def test_list_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1687,25 +1609,6 @@ def test_batch_get_assets_history(request_type, transport: str = 'grpc'): assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) -def test_batch_get_assets_history_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.batch_get_assets_history() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetAssetsHistoryRequest() - - def test_batch_get_assets_history_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1765,28 +1668,6 @@ def test_batch_get_assets_history_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_batch_get_assets_history_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse( - )) - await client.batch_get_assets_history() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetAssetsHistoryRequest() - - @pytest.mark.asyncio async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1855,7 +1736,6 @@ async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', r async def test_batch_get_assets_history_async_from_dict(): await test_batch_get_assets_history_async(request_type=dict) - def test_batch_get_assets_history_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1962,25 +1842,6 @@ def test_create_feed(request_type, transport: str = 'grpc'): assert response.relationship_types == ['relationship_types_value'] -def test_create_feed_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateFeedRequest() - - def test_create_feed_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2042,33 +1903,6 @@ def test_create_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_feed_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) - await client.create_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateFeedRequest() - - @pytest.mark.asyncio async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2147,7 +1981,6 @@ async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=a async def test_create_feed_async_from_dict(): await test_create_feed_async(request_type=dict) - def test_create_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2336,25 +2169,6 @@ def test_get_feed(request_type, transport: str = 'grpc'): assert response.relationship_types == ['relationship_types_value'] -def test_get_feed_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetFeedRequest() - - def test_get_feed_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2414,33 +2228,6 @@ def test_get_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_feed_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) - await client.get_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetFeedRequest() - - @pytest.mark.asyncio async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2519,7 +2306,6 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse async def test_get_feed_async_from_dict(): await test_get_feed_async(request_type=dict) - def test_get_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2698,25 +2484,6 @@ def test_list_feeds(request_type, transport: str = 'grpc'): assert isinstance(response, asset_service.ListFeedsResponse) -def test_list_feeds_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_feeds() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListFeedsRequest() - - def test_list_feeds_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2776,28 +2543,6 @@ def test_list_feeds_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_feeds_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - # Designate an appropriate return value for the call. 
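Editor's note: every removed `*_empty_call_async` test, including the `get_feed` one in this hunk, relied on the same idiom: patch the transport's multicallable and return a `grpc_helpers_async.FakeUnaryUnaryCall`, which wraps a plain message so the patched stub can be awaited like a real unary-unary RPC. A condensed, self-contained sketch of that idiom (anonymous credentials stand in for the test module's `async_anonymous_credentials` helper):

```python
import asyncio
from unittest import mock

from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.cloud.asset_v1.services.asset_service import AssetServiceAsyncClient
from google.cloud.asset_v1.types import asset_service

async def main():
    client = AssetServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc_asyncio',
    )
    # Patch the gRPC stub; FakeUnaryUnaryCall makes the result awaitable.
    with mock.patch.object(
            type(client.transport.get_feed), '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            asset_service.Feed(name='name_value'))
        response = await client.get_feed(
            request=asset_service.GetFeedRequest(name='name_value'))
    assert response.name == 'name_value'

asyncio.run(main())
```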
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( - )) - await client.list_feeds() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListFeedsRequest() - - @pytest.mark.asyncio async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2866,7 +2611,6 @@ async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=as async def test_list_feeds_async_from_dict(): await test_list_feeds_async(request_type=dict) - def test_list_feeds_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3055,25 +2799,6 @@ def test_update_feed(request_type, transport: str = 'grpc'): assert response.relationship_types == ['relationship_types_value'] -def test_update_feed_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateFeedRequest() - - def test_update_feed_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3131,33 +2856,6 @@ def test_update_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_feed_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) - await client.update_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateFeedRequest() - - @pytest.mark.asyncio async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3236,7 +2934,6 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a async def test_update_feed_async_from_dict(): await test_update_feed_async(request_type=dict) - def test_update_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3414,46 +3111,27 @@ def test_delete_feed(request_type, transport: str = 'grpc'): assert response is None -def test_delete_feed_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_delete_feed_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.DeleteFeedRequest( + name='name_value', + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_feed), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteFeedRequest() - - -def test_delete_feed_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.DeleteFeedRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_feed(request=request) + client.delete_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteFeedRequest( @@ -3492,27 +3170,6 @@ def test_delete_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_feed_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteFeedRequest() - - @pytest.mark.asyncio async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3580,7 +3237,6 @@ async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=a async def test_delete_feed_async_from_dict(): await test_delete_feed_async(request_type=dict) - def test_delete_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3761,25 +3417,6 @@ def test_search_all_resources(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_search_all_resources_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.search_all_resources() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllResourcesRequest() - - def test_search_all_resources_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3845,29 +3482,6 @@ def test_search_all_resources_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_search_all_resources_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - )) - await client.search_all_resources() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllResourcesRequest() - - @pytest.mark.asyncio async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3938,7 +3552,6 @@ async def test_search_all_resources_async(transport: str = 'grpc_asyncio', reque async def test_search_all_resources_async_from_dict(): await test_search_all_resources_async(request_type=dict) - def test_search_all_resources_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4335,25 +3948,6 @@ def test_search_all_iam_policies(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_search_all_iam_policies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.search_all_iam_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllIamPoliciesRequest() - - def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4419,29 +4013,6 @@ def test_search_all_iam_policies_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_search_all_iam_policies_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - )) - await client.search_all_iam_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllIamPoliciesRequest() - - @pytest.mark.asyncio async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4512,7 +4083,6 @@ async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', re async def test_search_all_iam_policies_async_from_dict(): await test_search_all_iam_policies_async(request_type=dict) - def test_search_all_iam_policies_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4899,25 +4469,6 @@ def test_analyze_iam_policy(request_type, transport: str = 'grpc'): assert response.fully_explored is True -def test_analyze_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyRequest() - - def test_analyze_iam_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4977,29 +4528,6 @@ def test_analyze_iam_policy_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_analyze_iam_policy_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, - )) - await client.analyze_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyRequest() - - @pytest.mark.asyncio async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5070,7 +4598,6 @@ async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request async def test_analyze_iam_policy_async_from_dict(): await test_analyze_iam_policy_async(request_type=dict) - def test_analyze_iam_policy_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5166,25 +4693,6 @@ def test_analyze_iam_policy_longrunning(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_analyze_iam_policy_longrunning_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_iam_policy_longrunning() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() - - def test_analyze_iam_policy_longrunning_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5249,29 +4757,6 @@ def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_analyze_iam_policy_longrunning_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.analyze_iam_policy_longrunning() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() - - @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5346,7 +4831,6 @@ async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_async async def test_analyze_iam_policy_longrunning_async_from_dict(): await test_analyze_iam_policy_longrunning_async(request_type=dict) - def test_analyze_iam_policy_longrunning_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5443,25 +4927,6 @@ def test_analyze_move(request_type, transport: str = 'grpc'): assert isinstance(response, asset_service.AnalyzeMoveResponse) -def test_analyze_move_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_move() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeMoveRequest() - - def test_analyze_move_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5523,28 +4988,6 @@ def test_analyze_move_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_analyze_move_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( - )) - await client.analyze_move() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeMoveRequest() - - @pytest.mark.asyncio async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5613,7 +5056,6 @@ async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type= async def test_analyze_move_async_from_dict(): await test_analyze_move_async(request_type=dict) - def test_analyze_move_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5714,25 +5156,6 @@ def test_query_assets(request_type, transport: str = 'grpc'): assert response.done is True -def test_query_assets_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.query_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.QueryAssetsRequest() - - def test_query_assets_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5798,30 +5221,6 @@ def test_query_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_query_assets_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, - )) - await client.query_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.QueryAssetsRequest() - - @pytest.mark.asyncio async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5894,7 +5293,6 @@ async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type= async def test_query_assets_async_from_dict(): await test_query_assets_async(request_type=dict) - def test_query_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5999,25 +5397,6 @@ def test_create_saved_query(request_type, transport: str = 'grpc'): assert response.last_updater == 'last_updater_value' -def test_create_saved_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateSavedQueryRequest() - - def test_create_saved_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6079,32 +5458,6 @@ def test_create_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_saved_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) - await client.create_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateSavedQueryRequest() - - @pytest.mark.asyncio async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6181,7 +5534,6 @@ async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request async def test_create_saved_query_async_from_dict(): await test_create_saved_query_async(request_type=dict) - def test_create_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6388,25 +5740,6 @@ def test_get_saved_query(request_type, transport: str = 'grpc'): assert response.last_updater == 'last_updater_value' -def test_get_saved_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetSavedQueryRequest() - - def test_get_saved_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
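Editor's note: with the empty-call tests gone, the `*_non_empty_request_with_auto_populated_field` tests (one begins just above) carry the request-plumbing coverage: they pre-fill the ordinary string fields, stub the transport, and assert that the request reaching the stub matches what was sent, by which point any UUID4-eligible fields would have been auto-populated per AIP-4235. A condensed sketch of the sync pattern, using `get_saved_query` as in the surrounding hunks:

```python
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.asset_v1.services.asset_service import AssetServiceClient
from google.cloud.asset_v1.types import asset_service

client = AssetServiceClient(
    credentials=ga_credentials.AnonymousCredentials(),
    transport='grpc',
)
request = asset_service.GetSavedQueryRequest(name='name_value')

# Stub the transport and verify the request object passed through intact.
with mock.patch.object(
        type(client.transport.get_saved_query), '__call__') as call:
    call.return_value = asset_service.SavedQuery(name='name_value')
    client.get_saved_query(request=request)

_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.GetSavedQueryRequest(name='name_value')
```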
@@ -6466,32 +5799,6 @@ def test_get_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_saved_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) - await client.get_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetSavedQueryRequest() - - @pytest.mark.asyncio async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6568,7 +5875,6 @@ async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_ty async def test_get_saved_query_async_from_dict(): await test_get_saved_query_async(request_type=dict) - def test_get_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6749,25 +6055,6 @@ def test_list_saved_queries(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_saved_queries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_saved_queries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListSavedQueriesRequest() - - def test_list_saved_queries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6831,29 +6118,6 @@ def test_list_saved_queries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_saved_queries_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', - )) - await client.list_saved_queries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListSavedQueriesRequest() - - @pytest.mark.asyncio async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6924,7 +6188,6 @@ async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request async def test_list_saved_queries_async_from_dict(): await test_list_saved_queries_async(request_type=dict) - def test_list_saved_queries_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7307,25 +6570,6 @@ def test_update_saved_query(request_type, transport: str = 'grpc'): assert response.last_updater == 'last_updater_value' -def test_update_saved_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateSavedQueryRequest() - - def test_update_saved_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7383,32 +6627,6 @@ def test_update_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_saved_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) - await client.update_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateSavedQueryRequest() - - @pytest.mark.asyncio async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7485,7 +6703,6 @@ async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request async def test_update_saved_query_async_from_dict(): await test_update_saved_query_async(request_type=dict) - def test_update_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7673,25 +6890,6 @@ def test_delete_saved_query(request_type, transport: str = 'grpc'): assert response is None -def test_delete_saved_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteSavedQueryRequest() - - def test_delete_saved_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7751,27 +6949,6 @@ def test_delete_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_saved_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteSavedQueryRequest() - - @pytest.mark.asyncio async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7839,7 +7016,6 @@ async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request async def test_delete_saved_query_async_from_dict(): await test_delete_saved_query_async(request_type=dict) - def test_delete_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8018,25 +7194,6 @@ def test_batch_get_effective_iam_policies(request_type, transport: str = 'grpc') assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) -def test_batch_get_effective_iam_policies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.batch_get_effective_iam_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() - - def test_batch_get_effective_iam_policies_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8096,28 +7253,6 @@ def test_batch_get_effective_iam_policies_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_batch_get_effective_iam_policies_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( - )) - await client.batch_get_effective_iam_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() - - @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8186,7 +7321,6 @@ async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asy async def test_batch_get_effective_iam_policies_async_from_dict(): await test_batch_get_effective_iam_policies_async(request_type=dict) - def test_batch_get_effective_iam_policies_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8285,25 +7419,6 @@ def test_analyze_org_policies(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_analyze_org_policies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_org_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() - - def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8369,29 +7484,6 @@ def test_analyze_org_policies_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_analyze_org_policies_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', - )) - await client.analyze_org_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() - - @pytest.mark.asyncio async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8462,7 +7554,6 @@ async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', reque async def test_analyze_org_policies_async_from_dict(): await test_analyze_org_policies_async(request_type=dict) - def test_analyze_org_policies_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8859,25 +7950,6 @@ def test_analyze_org_policy_governed_containers(request_type, transport: str = ' assert response.next_page_token == 'next_page_token_value' -def test_analyze_org_policy_governed_containers_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_org_policy_governed_containers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - - def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8943,29 +8015,6 @@ def test_analyze_org_policy_governed_containers_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_containers_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', - )) - await client.analyze_org_policy_governed_containers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - - @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9036,7 +8085,6 @@ async def test_analyze_org_policy_governed_containers_async(transport: str = 'gr async def test_analyze_org_policy_governed_containers_async_from_dict(): await test_analyze_org_policy_governed_containers_async(request_type=dict) - def test_analyze_org_policy_governed_containers_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9433,25 +8481,6 @@ def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc assert response.next_page_token == 'next_page_token_value' -def test_analyze_org_policy_governed_assets_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_org_policy_governed_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - - def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -9517,29 +8546,6 @@ def test_analyze_org_policy_governed_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_assets_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', - )) - await client.analyze_org_policy_governed_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - - @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9610,7 +8616,6 @@ async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_a async def test_analyze_org_policy_governed_assets_async_from_dict(): await test_analyze_org_policy_governed_assets_async(request_type=dict) - def test_analyze_org_policy_governed_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9973,37 +8978,6 @@ async def test_analyze_org_policy_governed_assets_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.ExportAssetsRequest, - dict, -]) -def test_export_assets_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.export_assets(request) - - # Establish that the response is the type that we expect. 
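# NOTE (hedged annotation): export_assets is a long-running operation, so the
# removed REST test above faked the HTTP body with a serialized
# operations_pb2.Operation and asserted only on the LRO handle
# (response.operation.name) rather than on a final ExportAssetsResponse.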
- assert response.operation.name == "operations/spam" - def test_export_assets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10101,6 +9075,7 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_assets(request) @@ -10117,110 +9092,6 @@ def test_export_assets_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent", "outputConfig", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = asset_service.ExportAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.export_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_export_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ExportAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
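# NOTE (hedged annotations):
# 1. The removed bad-request tests mock Session.request to return a bare 400
#    Response; google.api_core maps that status onto
#    core_exceptions.BadRequest, which pytest.raises then captures.
# 2. The `req.return_value.headers = {"header-1": ...}` lines added to the
#    surviving required-fields and flattened tests give the mocked Response a
#    real headers mapping, presumably because the regenerated client now
#    inspects response headers (e.g. for logging); a bare MagicMock attribute
#    would not behave like a dict.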
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.export_assets(request) - - -def test_export_assets_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListAssetsRequest, - dict, -]) -def test_list_assets_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_assets(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' - def test_list_assets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10318,6 +9189,7 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_assets(request) @@ -10334,66 +9206,6 @@ def test_list_assets_rest_unset_required_fields(): assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", "relationshipTypes", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - 
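# NOTE (hedged annotation): in the removed interceptor tests,
# path_template.transcode is patched so no real URI is built, the HTTP body is
# faked by serializing a default response message to JSON (next line), and the
# test verifies only that the pre_/post_ interceptor hooks each fired exactly
# once around the call.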
req.return_value._content = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) - - request = asset_service.ListAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.ListAssetsResponse() - - client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_assets(request) - - def test_list_assets_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10422,6 +9234,7 @@ def test_list_assets_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_assets(**mock_args) @@ -10509,40 +9322,6 @@ def test_list_assets_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetAssetsHistoryRequest, - dict, -]) -def test_batch_get_assets_history_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.BatchGetAssetsHistoryResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.batch_get_assets_history(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) - def test_batch_get_assets_history_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10640,6 +9419,7 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_get_assets_history(request) @@ -10656,124 +9436,13 @@ def test_batch_get_assets_history_rest_unset_required_fields(): assert set(unset_fields) == (set(("assetNames", "contentType", "readTimeWindow", "relationshipTypes", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_get_assets_history_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse()) - - request = asset_service.BatchGetAssetsHistoryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.BatchGetAssetsHistoryResponse() - - client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetAssetsHistoryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_get_assets_history(request) - - -def test_batch_get_assets_history_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.CreateFeedRequest, - dict, -]) -def test_create_feed_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_feed(request) - - # Establish that the response is the type that we expect. 
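# NOTE (hedged annotation): the "type that we expect" assertions below check
# two things at once: that the REST layer deserialized the JSON body back into
# the right proto message class, and that every populated field (name,
# asset_names, asset_types, content_type, relationship_types) survived the
# MessageToJson round trip intact.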
- assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - -def test_create_feed_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +def test_create_feed_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) # Should wrap all calls on client creation @@ -10867,6 +9536,7 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_feed(request) @@ -10883,66 +9553,6 @@ def test_create_feed_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) - - request = asset_service.CreateFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.Feed() - - client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateFeedRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_feed(request) - - def test_create_feed_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10971,6 +9581,7 @@ def test_create_feed_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_feed(**mock_args) @@ -10996,57 +9607,6 @@ def test_create_feed_rest_flattened_error(transport: str = 'rest'): ) -def test_create_feed_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.GetFeedRequest, - dict, -]) -def test_get_feed_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_feed(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - def test_get_feed_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11142,6 +9702,7 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_feed(request) @@ -11158,66 +9719,6 @@ def test_get_feed_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_feed") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) - - request = asset_service.GetFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.Feed() - - client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetFeedRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_feed(request) - - def test_get_feed_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11246,6 +9747,7 @@ def test_get_feed_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_feed(**mock_args) @@ -11271,47 +9773,6 @@ def test_get_feed_rest_flattened_error(transport: str = 'rest'): ) -def test_get_feed_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListFeedsRequest, - dict, -]) -def test_list_feeds_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.ListFeedsResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.ListFeedsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_feeds(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.ListFeedsResponse) - def test_list_feeds_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11407,6 +9868,7 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_feeds(request) @@ -11423,66 +9885,6 @@ def test_list_feeds_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_feeds_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse()) - - request = asset_service.ListFeedsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.ListFeedsResponse() - - client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListFeedsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_feeds(request) - - def test_list_feeds_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11511,6 +9913,7 @@ def test_list_feeds_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_feeds(**mock_args) @@ -11536,57 +9939,6 @@ def test_list_feeds_rest_flattened_error(transport: str = 'rest'): ) -def test_list_feeds_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateFeedRequest, - dict, -]) -def test_update_feed_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_feed(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - def test_update_feed_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11678,6 +10030,7 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_feed(request) @@ -11694,66 +10047,6 @@ def test_update_feed_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("feed", "updateMask", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) - - request = asset_service.UpdateFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.Feed() - - client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateFeedRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_feed(request) - - def test_update_feed_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11782,6 +10075,7 @@ def test_update_feed_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_feed(**mock_args) @@ -11807,44 +10101,6 @@ def test_update_feed_rest_flattened_error(transport: str = 'rest'): ) -def test_update_feed_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteFeedRequest, - dict, -]) -def test_delete_feed_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_feed(request) - - # Establish that the response is the type that we expect. 
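# NOTE (hedged annotation): DeleteFeed returns google.protobuf.Empty, which the
# client surfaces as None; hence the faked 200 response with an empty-string
# body and the bare `assert response is None` below.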
- assert response is None - def test_delete_feed_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11937,6 +10193,7 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_feed(request) @@ -11953,71 +10210,16 @@ def test_delete_feed_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre: - pre.assert_not_called() - pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = asset_service.DeleteFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteFeedRequest): +def test_delete_feed_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_feed(request) - - -def test_delete_feed_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None # get arguments that satisfy an http rule for this method sample_request = {'name': 'sample1/sample2/feeds/sample3'} @@ -12034,6 +10236,7 @@ def test_delete_feed_rest_flattened(): json_return_value = '' response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_feed(**mock_args) @@ -12059,49 +10262,6 @@ def test_delete_feed_rest_flattened_error(transport: str = 'rest'): ) -def test_delete_feed_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllResourcesRequest, - dict, -]) -def test_search_all_resources_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SearchAllResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.search_all_resources(request) - - # Establish that the response is the type that we expect. 
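# NOTE (hedged annotation): list-style REST methods return pager objects rather
# than raw responses, so the assertion below checks for
# pagers.SearchAllResourcesPager while next_page_token still round-trips
# through the underlying SearchAllResourcesResponse.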
- assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' - def test_search_all_resources_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12199,6 +10359,7 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_resources(request) @@ -12215,66 +10376,6 @@ def test_search_all_resources_rest_unset_required_fields(): assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", "readMask", )) & set(("scope", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_resources_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) - - request = asset_service.SearchAllResourcesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SearchAllResourcesResponse() - - client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_search_all_resources_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllResourcesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.search_all_resources(request) - - def test_search_all_resources_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12305,6 +10406,7 @@ def test_search_all_resources_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.search_all_resources(**mock_args) @@ -12394,42 +10496,6 @@ def test_search_all_resources_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllIamPoliciesRequest, - dict, -]) -def test_search_all_iam_policies_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.search_all_iam_policies(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchAllIamPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - def test_search_all_iam_policies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12527,6 +10593,7 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_iam_policies(request) @@ -12543,66 +10610,6 @@ def test_search_all_iam_policies_rest_unset_required_fields(): assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_iam_policies_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) - - request = asset_service.SearchAllIamPoliciesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SearchAllIamPoliciesResponse() - - client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllIamPoliciesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.search_all_iam_policies(request) - - def test_search_all_iam_policies_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12632,6 +10639,7 @@ def test_search_all_iam_policies_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.search_all_iam_policies(**mock_args) @@ -12720,42 +10728,6 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyRequest, - dict, -]) -def test_analyze_iam_policy_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_iam_policy(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) - assert response.fully_explored is True - def test_analyze_iam_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12848,6 +10820,7 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy(request) @@ -12864,111 +10837,13 @@ def test_analyze_iam_policy_rest_unset_required_fields(): assert set(unset_fields) == (set(("analysisQuery", "executionTimeout", "savedAnalysisQuery", )) & set(("analysisQuery", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_iam_policy_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) - - request = asset_service.AnalyzeIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeIamPolicyResponse() - - client.analyze_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_iam_policy_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_iam_policy(request) - - -def test_analyze_iam_policy_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyLongrunningRequest, - dict, -]) -def test_analyze_iam_policy_longrunning_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_iam_policy_longrunning(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - -def test_analyze_iam_policy_longrunning_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +def test_analyze_iam_policy_longrunning_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) # Should wrap all calls on client creation @@ -13054,6 +10929,7 @@ def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy_longrunning(request) @@ -13070,108 +10946,6 @@ def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("analysisQuery", "outputConfig", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ 
- mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = asset_service.AnalyzeIamPolicyLongrunningRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_iam_policy_longrunning_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_iam_policy_longrunning(request) - - -def test_analyze_iam_policy_longrunning_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeMoveRequest, - dict, -]) -def test_analyze_move_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeMoveResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeMoveResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_move(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.AnalyzeMoveResponse) - def test_analyze_move_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13276,6 +11050,7 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_move(request) @@ -13296,165 +11071,60 @@ def test_analyze_move_rest_unset_required_fields(): assert set(unset_fields) == (set(("destinationParent", "view", )) & set(("resource", "destinationParent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_move_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), +def test_query_assets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_move") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.AnalyzeMoveRequest.pb(asset_service.AnalyzeMoveRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.AnalyzeMoveResponse.to_json(asset_service.AnalyzeMoveResponse()) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request = asset_service.AnalyzeMoveRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeMoveResponse() + # Ensure method has been cached + assert client._transport.query_assets in client._transport._wrapped_methods - client.analyze_move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc - pre.assert_called_once() - post.assert_called_once() + request = {} + client.query_assets(request) + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 -def test_analyze_move_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeMoveRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + client.query_assets(request) - # send a request that will satisfy transcoding - request_init = {'resource': 'sample1/sample2'} + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_query_assets_rest_required_fields(request_type=asset_service.QueryAssetsRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_move(request) + # verify fields with default values are dropped + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -def test_analyze_move_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) + # verify required fields with default values are now present - -@pytest.mark.parametrize("request_type", [ - asset_service.QueryAssetsRequest, - dict, -]) -def test_query_assets_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.QueryAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.query_assets(request) - - # Establish that the response is the type that we expect. 
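The `*_use_cached_wrapped_rpc` tests kept throughout this diff all assert the same invariant: wrapped RPCs are built once by `_prep_wrapped_messages` at client construction and then looked up in `_transport._wrapped_methods` on every call, so `wrap_method` is never re-invoked per request. A self-contained sketch of that caching pattern follows; the `Transport`/`Client` classes and the `wrap_method` stand-in are illustrative, not the generated gapic API:

```python
def wrap_method(fn):
    # Stand-in for google.api_core.gapic_v1.method.wrap_method: decorate an
    # RPC with retry/timeout handling exactly once, at construction time.
    def wrapped(request):
        return fn(request)
    return wrapped


class Transport:
    def __init__(self):
        # Wrap every RPC once; this dict plays the role of _wrapped_methods.
        self._wrapped_methods = {self.query_assets: wrap_method(self.query_assets)}

    def query_assets(self, request):
        return {"done": True}


class Client:
    def __init__(self):
        self._transport = Transport()

    def query_assets(self, request):
        # Look up the cached wrapper instead of re-wrapping on each call.
        rpc = self._transport._wrapped_methods[self._transport.query_assets]
        return rpc(request)


client = Client()
cached = client._transport._wrapped_methods[client._transport.query_assets]
client.query_assets({})
client.query_assets({})
# The same wrapper object served both calls; no new wrapper was created.
assert client._transport._wrapped_methods[client._transport.query_assets] is cached
```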
- assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' - assert response.done is True - -def test_query_assets_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.query_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc - - request = {} - client.query_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.query_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_query_assets_rest_required_fields(request_type=asset_service.QueryAssetsRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = 'parent_value' unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) @@ -13497,6 +11167,7 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.query_assets(request) @@ -13513,179 +11184,6 @@ def test_query_assets_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_query_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_query_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
asset_service.QueryAssetsRequest.pb(asset_service.QueryAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.QueryAssetsResponse.to_json(asset_service.QueryAssetsResponse()) - - request = asset_service.QueryAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.QueryAssetsResponse() - - client.query_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_query_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.QueryAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.query_assets(request) - - -def test_query_assets_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.CreateSavedQueryRequest, - dict, -]) -def test_create_saved_query_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = asset_service.CreateSavedQueryRequest.meta.fields["saved_query"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["saved_query"][field])): - del request_init["saved_query"][field][i][subfield] - else: - del request_init["saved_query"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_saved_query(request) - - # Establish that the response is the type that we expect. 
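The field-pruning loop in the removed test above guards against version skew: any nested subfield present in the sample request but absent from the runtime proto dependency is deleted before the request object is built (see googleapis/gapic-generator-python#1748). The same idea in isolation, using plain dicts and a hypothetical allow-list in place of the real message descriptors:

```python
# (field, subfield) pairs the runtime message types actually define; in the
# real test this list is derived from the proto descriptors at runtime.
runtime_nested_fields = [
    ("content", "iam_policy_analysis_query"),
]

request_init = {
    "saved_query": {
        "name": "name_value",
        "content": {
            "iam_policy_analysis_query": {"scope": "scope_value"},
            "field_removed_at_runtime": True,  # hypothetical stale subfield
        },
    }
}

subfields_not_in_runtime = []
for field, value in request_init["saved_query"].items():
    # Repeated fields are probed through their first element.
    result = value[0] if isinstance(value, list) and value else value
    if isinstance(result, dict):
        for subfield in result:
            if (field, subfield) not in runtime_nested_fields:
                subfields_not_in_runtime.append((field, subfield))

# Drop the stale subfields so request construction cannot fail.
for field, subfield in subfields_not_in_runtime:
    del request_init["saved_query"][field][subfield]

assert "field_removed_at_runtime" not in request_init["saved_query"]["content"]
```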
- assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - def test_create_saved_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13791,6 +11289,7 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_saved_query(request) @@ -13811,68 +11310,8 @@ def test_create_saved_query_rest_unset_required_fields(): assert set(unset_fields) == (set(("savedQueryId", )) & set(("parent", "savedQuery", "savedQueryId", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_saved_query_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_saved_query") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.CreateSavedQueryRequest.pb(asset_service.CreateSavedQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) - - request = asset_service.CreateSavedQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SavedQuery() - - client.create_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateSavedQueryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_saved_query(request) - - -def test_create_saved_query_rest_flattened(): - client = AssetServiceClient( +def test_create_saved_query_rest_flattened(): + client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -13901,6 +11340,7 @@ def test_create_saved_query_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_saved_query(**mock_args) @@ -13928,55 +11368,6 @@ def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): ) -def test_create_saved_query_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.GetSavedQueryRequest, - dict, -]) -def test_get_saved_query_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_saved_query(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - def test_get_saved_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -14072,6 +11463,7 @@ def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_saved_query(request) @@ -14088,66 +11480,6 @@ def test_get_saved_query_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_saved_query_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_saved_query") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.GetSavedQueryRequest.pb(asset_service.GetSavedQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) - - request = asset_service.GetSavedQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SavedQuery() - - client.get_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetSavedQueryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_saved_query(request) - - def test_get_saved_query_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14176,6 +11508,7 @@ def test_get_saved_query_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_saved_query(**mock_args) @@ -14201,49 +11534,6 @@ def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): ) -def test_get_saved_query_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListSavedQueriesRequest, - dict, -]) -def test_list_saved_queries_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.ListSavedQueriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_saved_queries(request) - - # Establish that the response is the type that we expect. 
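The `*_rest_flattened` and `*_rest_flattened_error` tests retained in this region verify the flattened-call convention: keyword arguments (`mock_args`) are expanded into a request object before reaching the transport, and passing both a request and flattened fields raises `ValueError`. A minimal sketch of that convention, assuming a hypothetical request dataclass and method:

```python
from dataclasses import dataclass


@dataclass
class GetSavedQueryRequest:
    name: str = ""


def get_saved_query(request=None, *, name=None):
    # Flattened params and an explicit request are mutually exclusive,
    # mirroring the generated clients' flattened_error behavior.
    if request is not None and name is not None:
        raise ValueError("Cannot pass both `request` and flattened `name`.")
    if request is None:
        request = GetSavedQueryRequest(name=name)
    return request


req = get_saved_query(name="sample1/sample2/savedQueries/sample3")
assert req.name == "sample1/sample2/savedQueries/sample3"
```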
- assert isinstance(response, pagers.ListSavedQueriesPager) - assert response.next_page_token == 'next_page_token_value' - def test_list_saved_queries_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -14341,6 +11631,7 @@ def test_list_saved_queries_rest_required_fields(request_type=asset_service.List response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_saved_queries(request) @@ -14357,66 +11648,6 @@ def test_list_saved_queries_rest_unset_required_fields(): assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_saved_queries_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_saved_queries") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.ListSavedQueriesRequest.pb(asset_service.ListSavedQueriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.ListSavedQueriesResponse.to_json(asset_service.ListSavedQueriesResponse()) - - request = asset_service.ListSavedQueriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.ListSavedQueriesResponse() - - client.list_saved_queries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_saved_queries_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListSavedQueriesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_saved_queries(request) - - def test_list_saved_queries_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14445,6 +11676,7 @@ def test_list_saved_queries_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_saved_queries(**mock_args) @@ -14532,120 +11764,14 @@ def test_list_saved_queries_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateSavedQueryRequest, - dict, -]) -def test_update_saved_query_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} - request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["saved_query"][field])): - del request_init["saved_query"][field][i][subfield] - else: - del request_init["saved_query"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_saved_query(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - -def test_update_saved_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_update_saved_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -14731,6 +11857,7 @@ def test_update_saved_query_rest_required_fields(request_type=asset_service.Upda response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_saved_query(request) @@ -14747,66 +11874,6 @@ def test_update_saved_query_rest_unset_required_fields(): assert set(unset_fields) == (set(("updateMask", )) & set(("savedQuery", "updateMask", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_saved_query_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_saved_query") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.UpdateSavedQueryRequest.pb(asset_service.UpdateSavedQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) - - request = asset_service.UpdateSavedQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SavedQuery() - - client.update_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateSavedQueryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} - request = request_type(**request_init) - - # Mock the http request call 
within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_saved_query(request) - - def test_update_saved_query_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14836,6 +11903,7 @@ def test_update_saved_query_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_saved_query(**mock_args) @@ -14862,44 +11930,6 @@ def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): ) -def test_update_saved_query_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteSavedQueryRequest, - dict, -]) -def test_delete_saved_query_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_saved_query(request) - - # Establish that the response is the type that we expect. 
- assert response is None - def test_delete_saved_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -14992,6 +12022,7 @@ def test_delete_saved_query_rest_required_fields(request_type=asset_service.Dele response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_saved_query(request) @@ -15008,61 +12039,6 @@ def test_delete_saved_query_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_saved_query_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_saved_query") as pre: - pre.assert_not_called() - pb_message = asset_service.DeleteSavedQueryRequest.pb(asset_service.DeleteSavedQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = asset_service.DeleteSavedQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteSavedQueryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_saved_query(request) - - def test_delete_saved_query_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15089,6 +12065,7 @@ def test_delete_saved_query_rest_flattened(): json_return_value = '' response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_saved_query(**mock_args) @@ -15114,59 +12091,18 @@ def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): ) -def test_delete_saved_query_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) +def test_batch_get_effective_iam_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetEffectiveIamPoliciesRequest, - dict, -]) -def test_batch_get_effective_iam_policies_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.batch_get_effective_iam_policies(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) - -def test_batch_get_effective_iam_policies_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.batch_get_effective_iam_policies in client._transport._wrapped_methods @@ -15259,6 +12195,7 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_get_effective_iam_policies(request) @@ -15279,109 +12216,6 @@ def test_batch_get_effective_iam_policies_rest_unset_required_fields(): assert set(unset_fields) == (set(("names", )) & set(("scope", "names", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_effective_iam_policies") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(asset_service.BatchGetEffectiveIamPoliciesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(asset_service.BatchGetEffectiveIamPoliciesResponse()) - - request = asset_service.BatchGetEffectiveIamPoliciesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() - - client.batch_get_effective_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_batch_get_effective_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
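All of the `*_rest_interceptors` tests being deleted in this diff exercised one contract: the REST transport invokes `pre_<rpc>` on the `(request, metadata)` pair before the HTTP call and `post_<rpc>` on the response before returning it, with pass-through defaults when no interceptor is supplied (the `null_interceptor` case). A compact sketch of that contract under simplified, illustrative class names, not the generated `AssetServiceRestInterceptor` API:

```python
class RestInterceptor:
    # Default hooks are pass-throughs, mirroring the null_interceptor case.
    def pre_analyze_org_policies(self, request, metadata):
        return request, metadata

    def post_analyze_org_policies(self, response):
        return response


class RestTransport:
    def __init__(self, interceptor=None):
        self._interceptor = interceptor or RestInterceptor()

    def analyze_org_policies(self, request, metadata=()):
        # The pre hook may rewrite request/metadata before the HTTP call.
        request, metadata = self._interceptor.pre_analyze_org_policies(request, metadata)
        response = {"echo": request, "metadata": list(metadata)}  # fake HTTP call
        # The post hook may rewrite the response before the caller sees it.
        return self._interceptor.post_analyze_org_policies(response)


class MetadataInterceptor(RestInterceptor):
    def pre_analyze_org_policies(self, request, metadata):
        return request, [*metadata, ("cephalopod", "squid")]


transport = RestTransport(interceptor=MetadataInterceptor())
out = transport.analyze_org_policies({"scope": "sample1/sample2"}, metadata=[("key", "val")])
assert ("cephalopod", "squid") in out["metadata"]
```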
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_get_effective_iam_policies(request) - - -def test_batch_get_effective_iam_policies_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPoliciesRequest, - dict, -]) -def test_analyze_org_policies_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_org_policies(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - def test_analyze_org_policies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -15486,6 +12320,7 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policies(request) @@ -15506,66 +12341,6 @@ def test_analyze_org_policies_rest_unset_required_fields(): assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_org_policies_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policies") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb(asset_service.AnalyzeOrgPoliciesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.AnalyzeOrgPoliciesResponse.to_json(asset_service.AnalyzeOrgPoliciesResponse()) - - request = asset_service.AnalyzeOrgPoliciesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeOrgPoliciesResponse() - - client.analyze_org_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_org_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPoliciesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_org_policies(request) - - def test_analyze_org_policies_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15596,6 +12371,7 @@ def test_analyze_org_policies_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.analyze_org_policies(**mock_args) @@ -15685,42 +12461,6 @@ def test_analyze_org_policies_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - dict, -]) -def test_analyze_org_policy_governed_containers_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_org_policy_governed_containers(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) - assert response.next_page_token == 'next_page_token_value' - def test_analyze_org_policy_governed_containers_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -15825,6 +12565,7 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policy_governed_containers(request) @@ -15845,76 +12586,16 @@ def test_analyze_org_policy_governed_containers_rest_unset_required_fields(): assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_org_policy_governed_containers_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( +def test_analyze_org_policy_governed_containers_rest_flattened(): + client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_containers") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(asset_service.AnalyzeOrgPolicyGovernedContainersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + transport="rest", + ) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) - - request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - - client.analyze_org_policy_governed_containers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_org_policy_governed_containers_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
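# (Editorial aside: the success-path counterpart of the error mock above,
# distilled. Throughout these tests a 200 response is faked by serializing the
# expected proto with json_format.MessageToJson and stuffing the bytes into a
# requests.Response. A sketch, assuming the module's imports:)
from requests import Response
from google.protobuf import json_format
from google.cloud.asset_v1.types import asset_service

def fake_rest_response(message=None):
    # Wrap a proto message in a Response object the REST transport can parse.
    message = message or asset_service.AnalyzeOrgPolicyGovernedContainersResponse()
    response_value = Response()
    response_value.status_code = 200
    pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(message)
    response_value._content = json_format.MessageToJson(pb_message).encode("UTF-8")
    return response_value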
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_org_policy_governed_containers(request) - - -def test_analyze_org_policy_governed_containers_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() # get arguments that satisfy an http rule for this method sample_request = {'scope': 'sample1/sample2'} @@ -15935,6 +12616,7 @@ def test_analyze_org_policy_governed_containers_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.analyze_org_policy_governed_containers(**mock_args) @@ -16024,42 +12706,6 @@ def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'res assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - dict, -]) -def test_analyze_org_policy_governed_assets_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_org_policy_governed_assets(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) - assert response.next_page_token == 'next_page_token_value' - def test_analyze_org_policy_governed_assets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -16164,6 +12810,7 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policy_governed_assets(request) @@ -16184,66 +12831,6 @@ def test_analyze_org_policy_governed_assets_rest_unset_required_fields(): assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) - - request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() - - client.analyze_org_policy_governed_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_org_policy_governed_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
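# (Editorial aside: the pager tests further below assert two behaviours worth
# spelling out -- iterating the pager flattens items across pages, while
# `.pages` yields the raw per-page responses with their tokens. A sketch,
# assuming a client whose session is mocked to return consecutive pages:)
from google.cloud.asset_v1.services.asset_service import AssetServiceClient

def walk_governed_assets(client: AssetServiceClient, request: dict):
    pager = client.analyze_org_policy_governed_assets(request=request)
    items = list(pager)  # transparently fetches every page
    tokens = [
        page.raw_page.next_page_token
        for page in client.analyze_org_policy_governed_assets(request=request).pages
    ]
    return items, tokens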
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_org_policy_governed_assets(request) - - def test_analyze_org_policy_governed_assets_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16274,6 +12861,7 @@ def test_analyze_org_policy_governed_assets_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.analyze_org_policy_governed_assets(**mock_args) @@ -16353,123 +12941,4445 @@ def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): pager = client.analyze_org_policy_governed_assets(request=sample_request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) - for i in results) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) + for i in results) + + pages = list(client.analyze_org_policy_governed_assets(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AssetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AssetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AssetServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AssetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AssetServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AssetServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AssetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AssetServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.AssetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AssetServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AssetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AssetServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.AssetServiceGrpcTransport, + transports.AssetServiceGrpcAsyncIOTransport, + transports.AssetServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = AssetServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_assets_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.export_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ExportAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_assets_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + call.return_value = asset_service.ListAssetsResponse() + client.list_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ListAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_get_assets_history_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + call.return_value = asset_service.BatchGetAssetsHistoryResponse() + client.batch_get_assets_history(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.BatchGetAssetsHistoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_feed_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + call.return_value = asset_service.Feed() + client.create_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.CreateFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_feed_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + call.return_value = asset_service.Feed() + client.get_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.GetFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_feeds_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + call.return_value = asset_service.ListFeedsResponse() + client.list_feeds(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ListFeedsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_feed_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + call.return_value = asset_service.Feed() + client.update_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.UpdateFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_feed_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + call.return_value = None + client.delete_feed(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.DeleteFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_all_resources_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + call.return_value = asset_service.SearchAllResourcesResponse() + client.search_all_resources(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.SearchAllResourcesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_all_iam_policies_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + call.return_value = asset_service.SearchAllIamPoliciesResponse() + client.search_all_iam_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.SearchAllIamPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_iam_policy_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy), + '__call__') as call: + call.return_value = asset_service.AnalyzeIamPolicyResponse() + client.analyze_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_iam_policy_longrunning_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.analyze_iam_policy_longrunning(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeIamPolicyLongrunningRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
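# (Editorial aside: each *_empty_call_grpc test in this block follows the same
# shape -- patch the transport's bound stub, invoke the RPC with request=None,
# and assert the stub saw a default-constructed request. Distilled once, with
# the module's imports assumed:)
from unittest import mock
from google.auth import credentials as ga_credentials
from google.cloud.asset_v1.services.asset_service import AssetServiceClient
from google.cloud.asset_v1.types import asset_service

def sketch_empty_call_grpc():
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc")
    with mock.patch.object(type(client.transport.analyze_move), "__call__") as call:
        call.return_value = asset_service.AnalyzeMoveResponse()
        client.analyze_move(request=None)  # None is coerced to an empty request
        _, args, _ = call.mock_calls[0]
        assert args[0] == asset_service.AnalyzeMoveRequest()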
+def test_analyze_move_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + call.return_value = asset_service.AnalyzeMoveResponse() + client.analyze_move(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeMoveRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_assets_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + call.return_value = asset_service.QueryAssetsResponse() + client.query_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.QueryAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_saved_query_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + call.return_value = asset_service.SavedQuery() + client.create_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.CreateSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_saved_query_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + call.return_value = asset_service.SavedQuery() + client.get_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.GetSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_saved_queries_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + call.return_value = asset_service.ListSavedQueriesResponse() + client.list_saved_queries(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ListSavedQueriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_saved_query_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + call.return_value = asset_service.SavedQuery() + client.update_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.UpdateSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_saved_query_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + call.return_value = None + client.delete_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.DeleteSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_get_effective_iam_policies_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() + client.batch_get_effective_iam_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.BatchGetEffectiveIamPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_org_policies_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + call.return_value = asset_service.AnalyzeOrgPoliciesResponse() + client.analyze_org_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeOrgPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_org_policy_governed_containers_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + client.analyze_org_policy_governed_containers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_org_policy_governed_assets_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + client.analyze_org_policy_governed_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = AssetServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_export_assets_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.export_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ExportAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_assets_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( + next_page_token='next_page_token_value', + )) + await client.list_assets(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ListAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_get_assets_history_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse( + )) + await client.batch_get_assets_history(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.BatchGetAssetsHistoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_feed_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + )) + await client.create_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.CreateFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_feed_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + )) + await client.get_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.GetFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_feeds_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
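# (Editorial note on the asyncio variants: grpc_helpers_async.FakeUnaryUnaryCall,
# from google.api_core, wraps a plain response object so that awaiting the
# patched stub resolves to it -- the async code path is exercised without a
# real gRPC channel.)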
+ with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( + )) + await client.list_feeds(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ListFeedsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_feed_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + )) + await client.update_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.UpdateFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_feed_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.DeleteFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_search_all_resources_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( + next_page_token='next_page_token_value', + )) + await client.search_all_resources(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.SearchAllResourcesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
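# (Editorial note: the @pytest.mark.asyncio marker used on the tests below
# comes from the pytest-asyncio plugin, which these generated tests assume is
# installed.)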
+@pytest.mark.asyncio +async def test_search_all_iam_policies_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( + next_page_token='next_page_token_value', + )) + await client.search_all_iam_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.SearchAllIamPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_analyze_iam_policy_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + )) + await client.analyze_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_analyze_iam_policy_longrunning_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.analyze_iam_policy_longrunning(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeIamPolicyLongrunningRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_analyze_move_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( + )) + await client.analyze_move(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeMoveRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_query_assets_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( + job_reference='job_reference_value', + done=True, + )) + await client.query_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.QueryAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_saved_query_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + await client.create_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.CreateSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_saved_query_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + await client.get_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.GetSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_saved_queries_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( + next_page_token='next_page_token_value', + )) + await client.list_saved_queries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ListSavedQueriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_saved_query_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + await client.update_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.UpdateSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_saved_query_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.DeleteSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_get_effective_iam_policies_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( + )) + await client.batch_get_effective_iam_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.BatchGetEffectiveIamPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
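# (Editorial aside: the same empty-call failsafe, distilled for the async
# client -- identical to the sync sketch earlier except the stub's return
# value is wrapped in FakeUnaryUnaryCall so it can be awaited. The module's
# imports are assumed:)
from unittest import mock
from google.api_core import grpc_helpers_async
from google.cloud.asset_v1.services.asset_service import AssetServiceAsyncClient
from google.cloud.asset_v1.types import asset_service

async def sketch_empty_call_grpc_asyncio(client: AssetServiceAsyncClient):
    with mock.patch.object(
            type(client.transport.analyze_org_policies), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            asset_service.AnalyzeOrgPoliciesResponse())
        await client.analyze_org_policies(request=None)
        _, args, _ = call.mock_calls[0]
        assert args[0] == asset_service.AnalyzeOrgPoliciesRequest()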
+@pytest.mark.asyncio +async def test_analyze_org_policies_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( + next_page_token='next_page_token_value', + )) + await client.analyze_org_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeOrgPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token='next_page_token_value', + )) + await client.analyze_org_policy_governed_containers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token='next_page_token_value', + )) + await client.analyze_org_policy_governed_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = AssetServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_export_assets_rest_bad_request(request_type=asset_service.ExportAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
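# (Editorial note: unlike the removed tests, these replacement tests build the
# fake response from mock.Mock() and also set req.return_value.headers --
# presumably because the regenerated transport now inspects response headers;
# the header values themselves appear to be arbitrary.)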
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.export_assets(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ExportAssetsRequest, + dict, +]) +def test_export_assets_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.export_assets(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_assets_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = asset_service.ExportAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.export_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() + + +def test_list_assets_rest_bad_request(request_type=asset_service.ListAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_assets(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ListAssetsRequest, + dict, +]) +def test_list_assets_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.ListAssetsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_assets(request) + + # Establish that the response is the type that we expect. 
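Editor's sketch: the *_rest_call_success tests all wrap the expected proto into a fake HTTP response the same way; the steps are isolated below with the same names the tests use. The .pb() call matters because json_format works on the raw protobuf, not the proto-plus wrapper.

from unittest import mock
from google.protobuf import json_format
from google.cloud.asset_v1.types import asset_service

return_value = asset_service.ListAssetsResponse(next_page_token='next_page_token_value')

response_value = mock.Mock()
response_value.status_code = 200
# Convert the proto-plus wrapper to its underlying protobuf, then serialize.
json_return_value = json_format.MessageToJson(asset_service.ListAssetsResponse.pb(return_value))
response_value.content = json_return_value.encode('UTF-8')
response_value.headers = {"header-1": "value-1", "header-2": "value-2"}
# Inside the mock.patch block above this becomes: req.return_value = response_value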
+ assert isinstance(response, pagers.ListAssetsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_assets_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) + req.return_value.content = return_value + + request = asset_service.ListAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.ListAssetsResponse() + post_with_metadata.return_value = asset_service.ListAssetsResponse(), metadata + + client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_batch_get_assets_history_rest_bad_request(request_type=asset_service.BatchGetAssetsHistoryRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.batch_get_assets_history(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.BatchGetAssetsHistoryRequest, + dict, +]) +def test_batch_get_assets_history_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.BatchGetAssetsHistoryResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.batch_get_assets_history(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_get_assets_history_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse()) + req.return_value.content = return_value + + request = asset_service.BatchGetAssetsHistoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.BatchGetAssetsHistoryResponse() + post_with_metadata.return_value = asset_service.BatchGetAssetsHistoryResponse(), metadata + + client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
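Editor's sketch: the interceptor tests above verify the pre_*/post_* plumbing that a user-supplied interceptor plugs into. A minimal user-side interceptor, assuming the hook names exercised by these tests (this is the extension point being verified, not generated code):

from google.auth import credentials as ga_credentials
from google.cloud.asset_v1.services.asset_service import AssetServiceClient, transports

class LoggingInterceptor(transports.AssetServiceRestInterceptor):
    def pre_list_assets(self, request, metadata):
        # pre hooks may rewrite the request or append metadata pairs;
        # they must return the (request, metadata) tuple, as the tests assert.
        return request, list(metadata) + [("x-trace", "demo")]

    def post_list_assets(self, response):
        # post hooks receive the deserialized response before the client returns it.
        return response

transport = transports.AssetServiceRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = AssetServiceClient(transport=transport)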
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_feed(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.CreateFeedRequest, + dict, +]) +def test_create_feed_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_feed(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = asset_service.Feed.to_json(asset_service.Feed()) + req.return_value.content = return_value + + request = asset_service.CreateFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + post_with_metadata.return_value = asset_service.Feed(), metadata + + client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_feed(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.GetFeedRequest, + dict, +]) +def test_get_feed_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
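Editor's sketch: the tests lean on two proto-plus conversions throughout: .pb() exposes the raw protobuf for json_format, and to_json() serializes the wrapper directly. A small round-trip, assuming the from_json counterpart that proto-plus provides:

from google.protobuf import json_format
from google.cloud.asset_v1.types import asset_service

feed = asset_service.Feed(name='name_value', asset_types=['asset_types_value'])

as_json = asset_service.Feed.to_json(feed)       # proto-plus class-level helper
same = asset_service.Feed.from_json(as_json)     # assumed inverse helper
assert same.name == feed.name

raw = asset_service.Feed.pb(feed)                # underlying protobuf message
assert json_format.MessageToJson(raw)            # usable with google.protobuf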
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_feed(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = asset_service.Feed.to_json(asset_service.Feed()) + req.return_value.content = return_value + + request = asset_service.GetFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + post_with_metadata.return_value = asset_service.Feed(), metadata + + client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_feeds(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ListFeedsRequest, + dict, +]) +def test_list_feeds_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.ListFeedsResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.ListFeedsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_feeds(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.ListFeedsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_feeds_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse()) + req.return_value.content = return_value + + request = asset_service.ListFeedsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.ListFeedsResponse() + post_with_metadata.return_value = asset_service.ListFeedsResponse(), metadata + + client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + 
post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_feed(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.UpdateFeedRequest, + dict, +]) +def test_update_feed_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_feed(request) + + # Establish that the response is the type that we expect. 
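Editor's sketch: "send a request that will satisfy transcoding" means each request_init must populate every field bound in the RPC's URI template, which is why update_feed nests {'feed': {'name': ...}}. A toy illustration of the substitution follows; the real logic lives in google.api_core.path_template, and the template string here is an assumption shaped like the feed-style names above, not quoted from the library.

def toy_transcode(template: str, **fields) -> str:
    # Substitute each bound field into a template like "v1/{name=*/*/feeds/*}".
    out = template
    for key, value in fields.items():
        prefix = "{" + key + "="
        start = out.find(prefix)
        if start == -1:
            raise ValueError(f"field {key!r} not bound in template")
        end = out.index("}", start)
        out = out[:start] + value + out[end + 1:]
    return out

# 'sample1/sample2/feeds/sample3' matches the */*/feeds/* pattern:
assert toy_transcode("v1/{name=*/*/feeds/*}", name="sample1/sample2/feeds/sample3") \
    == "v1/sample1/sample2/feeds/sample3"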
+ assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = asset_service.Feed.to_json(asset_service.Feed()) + req.return_value.content = return_value + + request = asset_service.UpdateFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + post_with_metadata.return_value = asset_service.Feed(), metadata + + client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_feed(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.DeleteFeedRequest, + dict, +]) +def test_delete_feed_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_feed(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre: + pre.assert_not_called() + pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = asset_service.DeleteFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_search_all_resources_rest_bad_request(request_type=asset_service.SearchAllResourcesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.search_all_resources(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.SearchAllResourcesRequest, + dict, +]) +def test_search_all_resources_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.SearchAllResourcesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.search_all_resources(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAllResourcesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_all_resources_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) + req.return_value.content = return_value + + request = asset_service.SearchAllResourcesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.SearchAllResourcesResponse() + post_with_metadata.return_value = asset_service.SearchAllResourcesResponse(), metadata + + client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_search_all_iam_policies_rest_bad_request(request_type=asset_service.SearchAllIamPoliciesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
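Editor's sketch: the pager objects asserted above (ListAssetsPager, SearchAllResourcesPager, and so on) lazily re-invoke the underlying method with each next_page_token until it is empty. A sketch with two mocked pages, assuming the generated pager's (method, request, response) constructor as used elsewhere in this module:

from unittest import mock
from google.cloud.asset_v1.services.asset_service import pagers
from google.cloud.asset_v1.types import asset_service

first_page = asset_service.SearchAllResourcesResponse(next_page_token='page-2')
last_page = asset_service.SearchAllResourcesResponse()  # empty token ends iteration
method = mock.Mock(side_effect=[last_page])             # only later pages go through method

pager = pagers.SearchAllResourcesPager(
    method=method,
    request=asset_service.SearchAllResourcesRequest(scope='sample1/sample2'),
    response=first_page,
)
assert len(list(pager.pages)) == 2
method.assert_called_once()  # exactly one follow-up fetch for 'page-2'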
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.search_all_iam_policies(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.SearchAllIamPoliciesRequest, + dict, +]) +def test_search_all_iam_policies_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllIamPoliciesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.search_all_iam_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchAllIamPoliciesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_all_iam_policies_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) + req.return_value.content = return_value + + request = asset_service.SearchAllIamPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.SearchAllIamPoliciesResponse() + post_with_metadata.return_value = asset_service.SearchAllIamPoliciesResponse(), metadata + + client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_analyze_iam_policy_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.analyze_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeIamPolicyRequest, + dict, +]) +def test_analyze_iam_policy_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.analyze_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) + assert response.fully_explored is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_iam_policy_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) + req.return_value.content = return_value + + request = asset_service.AnalyzeIamPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.AnalyzeIamPolicyResponse() + post_with_metadata.return_value = asset_service.AnalyzeIamPolicyResponse(), metadata + + client.analyze_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_analyze_iam_policy_longrunning_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
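Editor's sketch: export_assets and analyze_iam_policy_longrunning are long-running methods, which is why their success tests fake an operations_pb2.Operation and their interceptor tests patch operation.Operation._set_result_from_operation. Outside of tests a caller blocks on the returned future; a usage sketch, assuming ambient credentials and field names as recalled from asset_v1 (verify against the library):

from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()  # assumes application default credentials
request = asset_v1.AnalyzeIamPolicyLongrunningRequest(
    analysis_query=asset_v1.IamPolicyAnalysisQuery(scope="projects/my-project"),
    output_config=asset_v1.IamPolicyAnalysisOutputConfig(
        gcs_destination=asset_v1.IamPolicyAnalysisOutputConfig.GcsDestination(
            uri="gs://my-bucket/analysis.json")),
)
op = client.analyze_iam_policy_longrunning(request=request)
response = op.result(timeout=300)  # polls the LRO until done, then unpacks the result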
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.analyze_iam_policy_longrunning(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeIamPolicyLongrunningRequest, + dict, +]) +def test_analyze_iam_policy_longrunning_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.analyze_iam_policy_longrunning(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = asset_service.AnalyzeIamPolicyLongrunningRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata
+ + client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'resource': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.analyze_move(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeMoveRequest, + dict, +]) +def test_analyze_move_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeMoveResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.AnalyzeMoveResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.analyze_move(request) + + # Establish that the response is the type that we expect.
+ assert isinstance(response, asset_service.AnalyzeMoveResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_move_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_move") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.AnalyzeMoveRequest.pb(asset_service.AnalyzeMoveRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = asset_service.AnalyzeMoveResponse.to_json(asset_service.AnalyzeMoveResponse()) + req.return_value.content = return_value + + request = asset_service.AnalyzeMoveRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.AnalyzeMoveResponse() + post_with_metadata.return_value = asset_service.AnalyzeMoveResponse(), metadata + + client.analyze_move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_query_assets_rest_bad_request(request_type=asset_service.QueryAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.query_assets(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.QueryAssetsRequest, + dict, +]) +def test_query_assets_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
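Editor's sketch: the success tests parametrize request_type over [RequestType, dict] because proto-plus request types and plain dicts both construct via **kwargs, so each test runs once with a typed request and once with an untyped mapping the client coerces. A standalone illustration:

from google.cloud.asset_v1.types import asset_service

request_init = {'parent': 'sample1/sample2'}

typed = asset_service.QueryAssetsRequest(**request_init)
untyped = dict(**request_init)

# The client accepts either form; the typed one is already canonical.
assert typed.parent == untyped['parent'] == 'sample1/sample2'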
+ return_value = asset_service.QueryAssetsResponse( + job_reference='job_reference_value', + done=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.QueryAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.query_assets(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.QueryAssetsResponse) + assert response.job_reference == 'job_reference_value' + assert response.done is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_query_assets_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_query_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = asset_service.QueryAssetsRequest.pb(asset_service.QueryAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = asset_service.QueryAssetsResponse.to_json(asset_service.QueryAssetsResponse()) + req.return_value.content = return_value + + request = asset_service.QueryAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.QueryAssetsResponse() + post_with_metadata.return_value = asset_service.QueryAssetsResponse(), metadata + + client.query_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_saved_query_rest_bad_request(request_type=asset_service.CreateSavedQueryRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_saved_query(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.CreateSavedQueryRequest, + dict, +]) +def test_create_saved_query_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = asset_service.CreateSavedQueryRequest.meta.fields["saved_query"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
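+        # proto-plus message classes expose their fields via `meta.fields`,
+        # while raw protobuf classes expose a `DESCRIPTOR`; the absence of
+        # `DESCRIPTOR` is used below to tell the two apart.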
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["saved_query"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["saved_query"][field])): + del request_init["saved_query"][field][i][subfield] + else: + del request_init["saved_query"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_saved_query(request) + + # Establish that the response is the type that we expect. 
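+    # Each scalar set on the response fixture above should survive the JSON
+    # round trip unchanged.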
+    assert isinstance(response, asset_service.SavedQuery)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.creator == 'creator_value'
+    assert response.last_updater == 'last_updater_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_saved_query_rest_interceptors(null_interceptor):
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+    )
+    client = AssetServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_saved_query") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = asset_service.CreateSavedQueryRequest.pb(asset_service.CreateSavedQueryRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = asset_service.SavedQuery.to_json(asset_service.SavedQuery())
+        req.return_value.content = return_value
+
+        request = asset_service.CreateSavedQueryRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = asset_service.SavedQuery()
+        post_with_metadata.return_value = asset_service.SavedQuery(), metadata
+
+        client.create_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_saved_query_rest_bad_request(request_type=asset_service.GetSavedQueryRequest):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'sample1/sample2/savedQueries/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.get_saved_query(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    asset_service.GetSavedQueryRequest,
+    dict,
+])
+def test_get_saved_query_rest_call_success(request_type):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'sample1/sample2/savedQueries/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
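+    # Patching the transport's underlying requests session keeps the full
+    # client stack (transcoding, headers, deserialization) in play while
+    # stubbing out only the network call.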
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = asset_service.SavedQuery(
+            name='name_value',
+            description='description_value',
+            creator='creator_value',
+            last_updater='last_updater_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = asset_service.SavedQuery.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_saved_query(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, asset_service.SavedQuery)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.creator == 'creator_value'
+    assert response.last_updater == 'last_updater_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_saved_query_rest_interceptors(null_interceptor):
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+    )
+    client = AssetServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_saved_query") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = asset_service.GetSavedQueryRequest.pb(asset_service.GetSavedQueryRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = asset_service.SavedQuery.to_json(asset_service.SavedQuery())
+        req.return_value.content = return_value
+
+        request = asset_service.GetSavedQueryRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = asset_service.SavedQuery()
+        post_with_metadata.return_value = asset_service.SavedQuery(), metadata
+
+        client.get_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_saved_queries_rest_bad_request(request_type=asset_service.ListSavedQueriesRequest):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_saved_queries(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    asset_service.ListSavedQueriesRequest,
+    dict,
+])
+def test_list_saved_queries_rest_call_success(request_type):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = asset_service.ListSavedQueriesResponse(
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = asset_service.ListSavedQueriesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_saved_queries(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListSavedQueriesPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_saved_queries_rest_interceptors(null_interceptor):
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+    )
+    client = AssetServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_saved_queries") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = asset_service.ListSavedQueriesRequest.pb(asset_service.ListSavedQueriesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = asset_service.ListSavedQueriesResponse.to_json(asset_service.ListSavedQueriesResponse())
+        req.return_value.content = return_value
+
+        request = asset_service.ListSavedQueriesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = asset_service.ListSavedQueriesResponse()
+        post_with_metadata.return_value = asset_service.ListSavedQueriesResponse(), metadata
+
+        client.list_saved_queries(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_update_saved_query_rest_bad_request(request_type=asset_service.UpdateSavedQueryRequest):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.update_saved_query(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    asset_service.UpdateSavedQueryRequest,
+    dict,
+])
+def test_update_saved_query_rest_call_success(request_type):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}}
+    request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}}
+    # The version of a generated dependency at test runtime may differ from the version used during generation.
+    # Delete any fields which are not present in the current runtime dependency
+    # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+    # Determine if the message type is proto-plus or protobuf
+    test_field = asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"]
+
+    def get_message_fields(field):
+        # Given a field which is a message (composite type), return a list with
+        # all the fields of the message.
+        # If the field is not a composite type, return an empty list.
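+        # The field lists produced here feed runtime_nested_fields below,
+        # which pairs each top-level field name with its nested field names,
+        # e.g. ('content', 'iam_policy_analysis_query') for SavedQuery.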
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["saved_query"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["saved_query"][field])): + del request_init["saved_query"][field][i][subfield] + else: + del request_init["saved_query"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_saved_query(request) + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, asset_service.SavedQuery)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.creator == 'creator_value'
+    assert response.last_updater == 'last_updater_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_saved_query_rest_interceptors(null_interceptor):
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+    )
+    client = AssetServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_saved_query") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = asset_service.UpdateSavedQueryRequest.pb(asset_service.UpdateSavedQueryRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = asset_service.SavedQuery.to_json(asset_service.SavedQuery())
+        req.return_value.content = return_value
+
+        request = asset_service.UpdateSavedQueryRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = asset_service.SavedQuery()
+        post_with_metadata.return_value = asset_service.SavedQuery(), metadata
+
+        client.update_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_delete_saved_query_rest_bad_request(request_type=asset_service.DeleteSavedQueryRequest):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'sample1/sample2/savedQueries/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
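+    # DeleteSavedQuery has no response payload (google.protobuf.Empty over
+    # the wire), but the error path below behaves like any other unary REST
+    # call.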
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_saved_query(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    asset_service.DeleteSavedQueryRequest,
+    dict,
+])
+def test_delete_saved_query_rest_call_success(request_type):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'sample1/sample2/savedQueries/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = ''
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.delete_saved_query(request)
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_saved_query_rest_interceptors(null_interceptor):
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+    )
+    client = AssetServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_saved_query") as pre:
+        pre.assert_not_called()
+        pb_message = asset_service.DeleteSavedQueryRequest.pb(asset_service.DeleteSavedQueryRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        request = asset_service.DeleteSavedQueryRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.delete_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+
+
+def test_batch_get_effective_iam_policies_rest_bad_request(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'scope': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.batch_get_effective_iam_policies(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.BatchGetEffectiveIamPoliciesRequest, + dict, +]) +def test_batch_get_effective_iam_policies_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.batch_get_effective_iam_policies(request) + + # Establish that the response is the type that we expect. 
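+    # BatchGetEffectiveIamPoliciesResponse appears to carry only repeated
+    # message fields, so the isinstance check is the meaningful assertion
+    # here.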
+    assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor):
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+    )
+    client = AssetServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_effective_iam_policies") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(asset_service.BatchGetEffectiveIamPoliciesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(asset_service.BatchGetEffectiveIamPoliciesResponse())
+        req.return_value.content = return_value
+
+        request = asset_service.BatchGetEffectiveIamPoliciesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse()
+        post_with_metadata.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse(), metadata
+
+        client.batch_get_effective_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_analyze_org_policies_rest_bad_request(request_type=asset_service.AnalyzeOrgPoliciesRequest):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'scope': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.analyze_org_policies(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeOrgPoliciesRequest, + dict, +]) +def test_analyze_org_policies_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeOrgPoliciesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.analyze_org_policies(request) + + # Establish that the response is the type that we expect. 
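+    # For paged methods the client wraps the raw response in a pager, so the
+    # type under test is pagers.AnalyzeOrgPoliciesPager rather than the
+    # AnalyzeOrgPoliciesResponse message itself.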
+    assert isinstance(response, pagers.AnalyzeOrgPoliciesPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_org_policies_rest_interceptors(null_interceptor):
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+    )
+    client = AssetServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policies") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb(asset_service.AnalyzeOrgPoliciesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = asset_service.AnalyzeOrgPoliciesResponse.to_json(asset_service.AnalyzeOrgPoliciesResponse())
+        req.return_value.content = return_value
+
+        request = asset_service.AnalyzeOrgPoliciesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = asset_service.AnalyzeOrgPoliciesResponse()
+        post_with_metadata.return_value = asset_service.AnalyzeOrgPoliciesResponse(), metadata
+
+        client.analyze_org_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_analyze_org_policy_governed_containers_rest_bad_request(request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'scope': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.analyze_org_policy_governed_containers(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + dict, +]) +def test_analyze_org_policy_governed_containers_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.analyze_org_policy_governed_containers(request) + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_org_policy_governed_containers_rest_interceptors(null_interceptor):
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+    )
+    client = AssetServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_containers") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(asset_service.AnalyzeOrgPolicyGovernedContainersRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedContainersResponse())
+        req.return_value.content = return_value
+
+        request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse()
+        post_with_metadata.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse(), metadata
+
+        client.analyze_org_policy_governed_containers(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_analyze_org_policy_governed_assets_rest_bad_request(request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'scope': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.analyze_org_policy_governed_assets(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + dict, +]) +def test_analyze_org_policy_governed_assets_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.analyze_org_policy_governed_assets(request) + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor):
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+    )
+    client = AssetServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets") as post, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_assets") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(asset_service.AnalyzeOrgPolicyGovernedAssetsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse())
+        req.return_value.content = return_value
+
+        request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()
+        post_with_metadata.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse(), metadata
+
+        client.analyze_org_policy_governed_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest):
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type()
+    request = json_format.ParseDict({'name': 'sample1/sample2/operations/sample3/sample4'}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
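+    # Unlike the asset_service methods above, the operations mixin works with
+    # raw operations_pb2 protobuf messages, built via json_format.ParseDict.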
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'sample1/sample2/operations/sample3/sample4'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_initialize_client_w_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_assets_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + client.export_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ExportAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_assets_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + client.list_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ListAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_get_assets_history_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
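+    # Patching the bound transport method (rather than the HTTP session) lets
+    # the test confirm that request=None is coerced into a default-constructed
+    # request message before it reaches the transport.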
+ with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + client.batch_get_assets_history(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.BatchGetAssetsHistoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_feed_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + client.create_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.CreateFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_feed_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + client.get_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.GetFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_feeds_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + client.list_feeds(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ListFeedsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_feed_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + client.update_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.UpdateFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_feed_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + client.delete_feed(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.DeleteFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_all_resources_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + client.search_all_resources(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.SearchAllResourcesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_all_iam_policies_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + client.search_all_iam_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.SearchAllIamPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_iam_policy_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy), + '__call__') as call: + client.analyze_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_iam_policy_longrunning_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + client.analyze_iam_policy_longrunning(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeIamPolicyLongrunningRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_move_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + client.analyze_move(request=None) + + # Establish that the underlying stub method was called. 
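+    # mock_calls[0] unpacks as (name, args, kwargs); args[0] is the request
+    # message the client synthesized from request=None.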
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeMoveRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_assets_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + client.query_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.QueryAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_saved_query_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + client.create_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.CreateSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_saved_query_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + client.get_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.GetSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_saved_queries_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + client.list_saved_queries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ListSavedQueriesRequest() - pages = list(client.analyze_org_policy_governed_assets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token + assert args[0] == request_msg -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.AssetServiceGrpcTransport( +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_saved_query_empty_call_rest():
+    client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
     )
-    with pytest.raises(ValueError):
-        client = AssetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.AssetServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = AssetServiceClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_saved_query),
+            '__call__') as call:
+        client.update_saved_query(request=None)
-    # It is an error to provide an api_key and a transport instance.
-    transport = transports.AssetServiceGrpcTransport(
+    # Establish that the underlying stub method was called.
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = asset_service.UpdateSavedQueryRequest()
+
+    assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_saved_query_empty_call_rest():
+    client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
     )
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = AssetServiceClient(
-            client_options=options,
-            transport=transport,
-        )
-    # It is an error to provide an api_key and a credential.
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = AssetServiceClient(
-            client_options=options,
-            credentials=ga_credentials.AnonymousCredentials()
-        )
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_saved_query),
+            '__call__') as call:
+        client.delete_saved_query(request=None)
-    # It is an error to provide scopes and a transport instance.
-    transport = transports.AssetServiceGrpcTransport(
+    # Establish that the underlying stub method was called.
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = asset_service.DeleteSavedQueryRequest()
+
+    assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_batch_get_effective_iam_policies_empty_call_rest():
+    client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
     )
-    with pytest.raises(ValueError):
-        client = AssetServiceClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
-        )
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.batch_get_effective_iam_policies),
+            '__call__') as call:
+        client.batch_get_effective_iam_policies(request=None)
-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.AssetServiceGrpcTransport(
+    # Establish that the underlying stub method was called.
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = asset_service.BatchGetEffectiveIamPoliciesRequest()
+
+    assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_analyze_org_policies_empty_call_rest():
+    client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
     )
-    client = AssetServiceClient(transport=transport)
-    assert client.transport is transport
-def test_transport_get_channel():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.AssetServiceGrpcTransport(
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policies),
+            '__call__') as call:
+        client.analyze_org_policies(request=None)
+
+    # Establish that the underlying stub method was called.
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = asset_service.AnalyzeOrgPoliciesRequest()
+
+    assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_analyze_org_policy_governed_containers_empty_call_rest():
+    client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
     )
-    channel = transport.grpc_channel
-    assert channel
-    transport = transports.AssetServiceGrpcAsyncIOTransport(
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policy_governed_containers),
+            '__call__') as call:
+        client.analyze_org_policy_governed_containers(request=None)
+
+    # Establish that the underlying stub method was called.
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = asset_service.AnalyzeOrgPolicyGovernedContainersRequest()
+
+    assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_analyze_org_policy_governed_assets_empty_call_rest():
+    client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
     )
-    channel = transport.grpc_channel
-    assert channel
-@pytest.mark.parametrize("transport_class", [
-    transports.AssetServiceGrpcTransport,
-    transports.AssetServiceGrpcAsyncIOTransport,
-    transports.AssetServiceRestTransport,
-])
-def test_transport_adc(transport_class):
-    # Test default credentials are used if not provided.
-    with mock.patch.object(google.auth, 'default') as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class()
-        adc.assert_called_once()
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_org_policy_governed_assets),
+            '__call__') as call:
+        client.analyze_org_policy_governed_assets(request=None)
-def test_transport_kind_grpc():
-    transport = AssetServiceClient.get_transport_class("grpc")(
-        credentials=ga_credentials.AnonymousCredentials()
-    )
-    assert transport.kind == "grpc"
+    # Establish that the underlying stub method was called.
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()
+
+    assert args[0] == request_msg
-def test_transport_kind_grpc_asyncio():
-    transport = AssetServiceAsyncClient.get_transport_class("grpc_asyncio")(
-        credentials=async_anonymous_credentials()
-    )
-    assert transport.kind == "grpc_asyncio"
+def test_asset_service_rest_lro_client():
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
-def test_transport_kind_rest():
-    transport = AssetServiceClient.get_transport_class("rest")(
-        credentials=ga_credentials.AnonymousCredentials()
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
     )
-    assert transport.kind == "rest"
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
 
 
 def test_transport_grpc_default():
     # A client should use the gRPC transport by default.
@@ -16720,23 +17630,6 @@ def test_asset_service_http_transport_client_cert_source_for_mtls():
     mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
 
 
-def test_asset_service_rest_lro_client():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    transport = client.transport
-
-    # Ensure that we have a api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.AbstractOperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
 @pytest.mark.parametrize("transport_name", [
     "grpc",
     "grpc_asyncio",
@@ -17233,65 +18126,6 @@ def test_client_with_default_client_info():
     )
     prep.assert_called_once_with(client_info)
 
-@pytest.mark.asyncio
-async def test_transport_close_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-    with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close:
-        async with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    request = request_type()
-    request = json_format.ParseDict({'name': 'sample1/sample2/operations/sample3/sample4'}, request)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.get_operation(request)
-
-@pytest.mark.parametrize("request_type", [
-    operations_pb2.GetOperationRequest,
-    dict,
-])
-def test_get_operation_rest(request_type):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request_init = {'name': 'sample1/sample2/operations/sample3/sample4'}
-    request = request_type(**request_init)
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation()
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        response = client.get_operation(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, operations_pb2.Operation)
-
 
 def test_get_operation(transport: str = "grpc"):
     client = AssetServiceClient(
@@ -17422,21 +18256,39 @@ async def test_get_operation_from_dict_async():
         call.assert_called()
 
 
-def test_transport_close():
-    transports = {
-        "rest": "_session",
-        "grpc": "_grpc_channel",
-    }
+def test_transport_close_grpc():
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    client = AssetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close_rest():
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
 
-    for transport, close_name in transports.items():
-        client = AssetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport
-        )
-        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
-            with client:
-                close.assert_not_called()
-            close.assert_called_once()
 
 def test_client_ctx():
     transports = [
diff --git a/tests/integration/goldens/credentials/docs/index.rst b/tests/integration/goldens/credentials/docs/index.rst
index 3e271990d6..2113270ae1 100755
--- a/tests/integration/goldens/credentials/docs/index.rst
+++ b/tests/integration/goldens/credentials/docs/index.rst
@@ -3,5 +3,5 @@ API Reference
 .. toctree::
     :maxdepth: 2
 
-    credentials_v1/services
-    credentials_v1/types
+    credentials_v1/services_
+    credentials_v1/types_
diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py
index 485c5bdee9..3ade51402e 100755
--- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py
+++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+import logging as std_logging
 from collections import OrderedDict
 import re
 from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
@@ -39,6 +40,13 @@
 from .transports.grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport
 from .client import IAMCredentialsClient
 
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
 
 class IAMCredentialsAsyncClient:
     """A service account is a special type of Google account that
@@ -238,6 +246,20 @@ def __init__(self, *,
 
         )
 
+        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
+            _LOGGER.debug(
+                "Created client `google.iam.credentials_v1.IAMCredentialsAsyncClient`.",
+                extra = {
+                    "serviceName": "google.iam.credentials.v1.IAMCredentials",
+                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
+                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
+                } if hasattr(self._client._transport, "_credentials") else {
+                    "serviceName": "google.iam.credentials.v1.IAMCredentials",
+                    "credentialsType": None,
+                }
+            )
+
     async def generate_access_token(self,
             request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None,
             *,
@@ -247,7 +269,7 @@ async def generate_access_token(self,
             lifetime: Optional[duration_pb2.Duration] = None,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> common.GenerateAccessTokenResponse:
         r"""Generates an OAuth 2.0 access token for a service
         account.
@@ -336,8 +358,10 @@ async def sample_generate_access_token():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             google.iam.credentials_v1.types.GenerateAccessTokenResponse:
@@ -402,7 +426,7 @@ async def generate_id_token(self,
             include_email: Optional[bool] = None,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> common.GenerateIdTokenResponse:
         r"""Generates an OpenID Connect ID token for a service
         account.
@@ -485,8 +509,10 @@ async def sample_generate_id_token():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             google.iam.credentials_v1.types.GenerateIdTokenResponse:
@@ -550,7 +576,7 @@ async def sign_blob(self,
             payload: Optional[bytes] = None,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> common.SignBlobResponse:
         r"""Signs a blob using a service account's system-managed
         private key.
@@ -622,8 +648,10 @@ async def sample_sign_blob():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             google.iam.credentials_v1.types.SignBlobResponse:
@@ -685,7 +713,7 @@ async def sign_jwt(self,
             payload: Optional[str] = None,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> common.SignJwtResponse:
         r"""Signs a JWT using a service account's system-managed
         private key.
@@ -760,8 +788,10 @@ async def sample_sign_jwt():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
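+                For example, a minimal sketch (the header names shown are
+                illustrative only, not part of the API)::
+
+                    metadata=(
+                        ("x-goog-request-params", "name=projects/-"),
+                        ("debug-context-bin", b"\x01\x02"),
+                    )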
 
         Returns:
             google.iam.credentials_v1.types.SignJwtResponse:
diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py
index 3855cbc18d..afed0499e6 100755
--- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py
+++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py
@@ -16,6 +16,7 @@
 from collections import OrderedDict
 from http import HTTPStatus
 import json
+import logging as std_logging
 import os
 import re
 from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
@@ -38,6 +39,14 @@
 except AttributeError:  # pragma: NO COVER
     OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
 
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
 from google.iam.credentials_v1.types import common
 from google.protobuf import duration_pb2  # type: ignore
 from google.protobuf import timestamp_pb2  # type: ignore
@@ -399,33 +408,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_
             raise ValueError("Universe Domain cannot be an empty string.")
         return universe_domain
 
-    @staticmethod
-    def _compare_universes(client_universe: str,
-                           credentials: ga_credentials.Credentials) -> bool:
-        """Returns True iff the universe domains used by the client and credentials match.
-
-        Args:
-            client_universe (str): The universe domain configured via the client options.
-            credentials (ga_credentials.Credentials): The credentials being used in the client.
-
-        Returns:
-            bool: True iff client_universe matches the universe in credentials.
-
-        Raises:
-            ValueError: when client_universe does not match the universe in credentials.
-        """
-
-        default_universe = IAMCredentialsClient._DEFAULT_UNIVERSE
-        credentials_universe = getattr(credentials, "universe_domain", default_universe)
-
-        if client_universe != credentials_universe:
-            raise ValueError("The configured universe domain "
-                f"({client_universe}) does not match the universe domain "
-                f"found in the credentials ({credentials_universe}). "
-                "If you haven't configured the universe domain explicitly, "
-                f"`{default_universe}` is the default.")
-        return True
-
     def _validate_universe_domain(self):
         """Validates client's and credentials' universe domains are consistent.
 
@@ -435,9 +417,9 @@ def _validate_universe_domain(self):
         Raises:
             ValueError: If the configured universe domain is not valid.
         """
-        self._is_universe_domain_valid = (self._is_universe_domain_valid or
-            IAMCredentialsClient._compare_universes(self.universe_domain, self.transport._credentials))
-        return self._is_universe_domain_valid
+
+        # NOTE (b/349488459): universe validation is disabled until further notice.
+        return True
 
     def _add_cred_info_for_auth_errors(
         self,
@@ -552,6 +534,10 @@ def __init__(self, *,
         # Initialize the universe domain validation.
         self._is_universe_domain_valid = False
 
+        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
+            # Setup logging.
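+            # Note: initialize_logging() is assumed to be a no-op unless the
+            # GOOGLE_SDK_PYTHON_LOGGING_SCOPE environment variable names a
+            # logger scope to enable (e.g. "google" or
+            # "google.iam.credentials_v1"); it must be set before the client
+            # is constructed.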
+            client_logging.initialize_logging()
+
         api_key_value = getattr(self._client_options, "api_key", None)
         if api_key_value and credentials:
             raise ValueError("client_options.api_key and credentials are mutually exclusive")
@@ -604,6 +590,21 @@ def __init__(self, *,
                 api_audience=self._client_options.api_audience,
             )
 
+        if "async" not in str(self._transport):
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
+                _LOGGER.debug(
+                    "Created client `google.iam.credentials_v1.IAMCredentialsClient`.",
+                    extra = {
+                        "serviceName": "google.iam.credentials.v1.IAMCredentials",
+                        "universeDomain": getattr(self._transport._credentials, "universe_domain", ""),
+                        "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
+                        "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
+                    } if hasattr(self._transport, "_credentials") else {
+                        "serviceName": "google.iam.credentials.v1.IAMCredentials",
+                        "credentialsType": None,
+                    }
+                )
+
     def generate_access_token(self,
             request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None,
             *,
@@ -613,7 +614,7 @@ def generate_access_token(self,
             lifetime: Optional[duration_pb2.Duration] = None,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> common.GenerateAccessTokenResponse:
         r"""Generates an OAuth 2.0 access token for a service
         account.
@@ -702,8 +703,10 @@ def sample_generate_access_token():
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             google.iam.credentials_v1.types.GenerateAccessTokenResponse:
@@ -736,7 +739,7 @@ def sample_generate_access_token():
         # and friendly error handling.
         rpc = self._transport._wrapped_methods[self._transport.generate_access_token]
 
-        # Certain fields should be provided within the metadata header; 
+        # Certain fields should be provided within the metadata header;
         # add these here.
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
@@ -767,7 +770,7 @@ def generate_id_token(self,
             include_email: Optional[bool] = None,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> common.GenerateIdTokenResponse:
         r"""Generates an OpenID Connect ID token for a service
         account.
@@ -850,8 +853,10 @@ def sample_generate_id_token():
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             google.iam.credentials_v1.types.GenerateIdTokenResponse:
@@ -884,7 +889,7 @@ def sample_generate_id_token():
         # and friendly error handling.
         rpc = self._transport._wrapped_methods[self._transport.generate_id_token]
 
-        # Certain fields should be provided within the metadata header; 
+        # Certain fields should be provided within the metadata header;
         # add these here.
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
@@ -914,7 +919,7 @@ def sign_blob(self,
             payload: Optional[bytes] = None,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> common.SignBlobResponse:
         r"""Signs a blob using a service account's system-managed
         private key.
@@ -986,8 +991,10 @@ def sample_sign_blob():
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             google.iam.credentials_v1.types.SignBlobResponse:
@@ -1018,7 +1025,7 @@ def sample_sign_blob():
         # and friendly error handling.
         rpc = self._transport._wrapped_methods[self._transport.sign_blob]
 
-        # Certain fields should be provided within the metadata header; 
+        # Certain fields should be provided within the metadata header;
        # add these here.
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
@@ -1048,7 +1055,7 @@ def sign_jwt(self,
             payload: Optional[str] = None,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> common.SignJwtResponse:
         r"""Signs a JWT using a service account's system-managed
         private key.
@@ -1123,8 +1130,10 @@ def sample_sign_jwt():
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             google.iam.credentials_v1.types.SignJwtResponse:
@@ -1155,7 +1164,7 @@ def sample_sign_jwt():
         # and friendly error handling.
         rpc = self._transport._wrapped_methods[self._transport.sign_jwt]
 
-        # Certain fields should be provided within the metadata header; 
+        # Certain fields should be provided within the metadata header;
         # add these here.
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py
index b0dd5bb669..0e55e29785 100755
--- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py
+++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py
@@ -13,6 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+import json
+import logging as std_logging
+import pickle
 import warnings
 from typing import Callable, Dict, Optional, Sequence, Tuple, Union
 
@@ -21,12 +24,83 @@
 import google.auth  # type: ignore
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
 import grpc  # type: ignore
+import proto  # type: ignore
 
 from google.iam.credentials_v1.types import common
 from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO
 
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.iam.credentials.v1.IAMCredentials",
+                    "rpcName": client_call_details.method,
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC response metadata into a JSON-serializable dict
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.iam.credentials.v1.IAMCredentials",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
 
 class IAMCredentialsGrpcTransport(IAMCredentialsTransport):
     """gRPC backend transport for IAMCredentials.
@@ -186,7 +260,10 @@ def __init__(self, *,
             ],
         )
 
-        # Wrap messages. This must be done after self._grpc_channel exists
+        self._interceptor = _LoggingClientInterceptor()
+        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)
+
+        # Wrap messages. This must be done after self._logged_channel exists
         self._prep_wrapped_messages(client_info)
 
     @classmethod
@@ -260,7 +337,7 @@ def generate_access_token(self) -> Callable[
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
         if 'generate_access_token' not in self._stubs:
-            self._stubs['generate_access_token'] = self.grpc_channel.unary_unary(
+            self._stubs['generate_access_token'] = self._logged_channel.unary_unary(
                 '/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken',
                 request_serializer=common.GenerateAccessTokenRequest.serialize,
                 response_deserializer=common.GenerateAccessTokenResponse.deserialize,
@@ -287,7 +364,7 @@ def generate_id_token(self) -> Callable[
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
        if 'generate_id_token' not in self._stubs:
-            self._stubs['generate_id_token'] = self.grpc_channel.unary_unary(
+            self._stubs['generate_id_token'] = self._logged_channel.unary_unary(
                 '/google.iam.credentials.v1.IAMCredentials/GenerateIdToken',
                 request_serializer=common.GenerateIdTokenRequest.serialize,
                 response_deserializer=common.GenerateIdTokenResponse.deserialize,
@@ -314,7 +391,7 @@ def sign_blob(self) -> Callable[
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
         if 'sign_blob' not in self._stubs:
-            self._stubs['sign_blob'] = self.grpc_channel.unary_unary(
+            self._stubs['sign_blob'] = self._logged_channel.unary_unary(
                 '/google.iam.credentials.v1.IAMCredentials/SignBlob',
                 request_serializer=common.SignBlobRequest.serialize,
                 response_deserializer=common.SignBlobResponse.deserialize,
@@ -341,7 +418,7 @@ def sign_jwt(self) -> Callable[
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
         if 'sign_jwt' not in self._stubs:
-            self._stubs['sign_jwt'] = self.grpc_channel.unary_unary(
+            self._stubs['sign_jwt'] = self._logged_channel.unary_unary(
                 '/google.iam.credentials.v1.IAMCredentials/SignJwt',
                 request_serializer=common.SignJwtRequest.serialize,
                 response_deserializer=common.SignJwtResponse.deserialize,
@@ -349,7 +426,7 @@ def sign_jwt(self) -> Callable[
         return self._stubs['sign_jwt']
 
     def close(self):
-        self.grpc_channel.close()
+        self._logged_channel.close()
 
     @property
     def kind(self) -> str:
diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py
index 8589ca45f0..8ac65146dc 100755
--- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py
+++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py
@@ -14,6 +14,9 @@
 # limitations under the License.
 #
 import inspect
+import json
+import pickle
+import logging as std_logging
 import warnings
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
 
@@ -23,14 +26,84 @@
 from google.api_core import retry_async as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
 import grpc  # type: ignore
+import proto  # type: ignore
 from grpc.experimental import aio  # type: ignore
 
 from google.iam.credentials_v1.types import common
 from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO
 from .grpc import IAMCredentialsGrpcTransport
 
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    async def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.iam.credentials.v1.IAMCredentials",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert the gRPC response metadata into a JSON-serializable dict
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.iam.credentials.v1.IAMCredentials",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
        return response
+
 
 class IAMCredentialsGrpcAsyncIOTransport(IAMCredentialsTransport):
     """gRPC AsyncIO backend transport for IAMCredentials.
@@ -232,8 +305,11 @@ def __init__(self, *,
             ],
         )
 
-        # Wrap messages. This must be done after self._grpc_channel exists
+        self._interceptor = _LoggingClientAIOInterceptor()
+        self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+        self._logged_channel = self._grpc_channel
         self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        # Wrap messages. This must be done after self._logged_channel exists
         self._prep_wrapped_messages(client_info)
 
     @property
@@ -266,7 +342,7 @@ def generate_access_token(self) -> Callable[
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
         if 'generate_access_token' not in self._stubs:
-            self._stubs['generate_access_token'] = self.grpc_channel.unary_unary(
+            self._stubs['generate_access_token'] = self._logged_channel.unary_unary(
                 '/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken',
                 request_serializer=common.GenerateAccessTokenRequest.serialize,
                 response_deserializer=common.GenerateAccessTokenResponse.deserialize,
@@ -293,7 +369,7 @@ def generate_id_token(self) -> Callable[
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
         if 'generate_id_token' not in self._stubs:
-            self._stubs['generate_id_token'] = self.grpc_channel.unary_unary(
+            self._stubs['generate_id_token'] = self._logged_channel.unary_unary(
                 '/google.iam.credentials.v1.IAMCredentials/GenerateIdToken',
                 request_serializer=common.GenerateIdTokenRequest.serialize,
                 response_deserializer=common.GenerateIdTokenResponse.deserialize,
@@ -320,7 +396,7 @@ def sign_blob(self) -> Callable[
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
         if 'sign_blob' not in self._stubs:
-            self._stubs['sign_blob'] = self.grpc_channel.unary_unary(
+            self._stubs['sign_blob'] = self._logged_channel.unary_unary(
                 '/google.iam.credentials.v1.IAMCredentials/SignBlob',
                 request_serializer=common.SignBlobRequest.serialize,
                 response_deserializer=common.SignBlobResponse.deserialize,
@@ -347,7 +423,7 @@ def sign_jwt(self) -> Callable[
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
         if 'sign_jwt' not in self._stubs:
-            self._stubs['sign_jwt'] = self.grpc_channel.unary_unary(
+            self._stubs['sign_jwt'] = self._logged_channel.unary_unary(
                 '/google.iam.credentials.v1.IAMCredentials/SignJwt',
                 request_serializer=common.SignJwtRequest.serialize,
                 response_deserializer=common.SignJwtResponse.deserialize,
@@ -425,7 +501,7 @@ def _wrap_method(self, func, *args, **kwargs):
         return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
 
     def close(self):
-        return self.grpc_channel.close()
+        return self._logged_channel.close()
 
     @property
     def kind(self) -> str:
diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py
index 1dc8b94d61..9dc7959ac5 100755
--- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py
+++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py
@@ -13,9 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+import logging
+import json  # type: ignore
 
 from google.auth.transport.requests import AuthorizedSession  # type: ignore
-import json  # type: ignore
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.api_core import exceptions as core_exceptions
 from google.api_core import retry as retries
@@ -42,11 +43,18 @@
 except AttributeError:  # pragma: NO COVER
     OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
 
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = logging.getLogger(__name__)
 
 DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
     gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
     grpc_version=None,
-    rest_version=requests_version,
+    rest_version=f"requests@{requests_version}",
 )
 
 
@@ -102,7 +110,7 @@ def post_sign_jwt(self, response):
 
     """
 
-    def pre_generate_access_token(self, request: common.GenerateAccessTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.GenerateAccessTokenRequest, Sequence[Tuple[str, str]]]:
+    def pre_generate_access_token(self, request: common.GenerateAccessTokenRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateAccessTokenRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
        """Pre-rpc interceptor for generate_access_token
 
         Override in a subclass to manipulate the request or metadata
@@ -113,12 +121,32 @@ def pre_generate_access_token(self, request: common.GenerateAccessTokenRequest,
     def post_generate_access_token(self, response: common.GenerateAccessTokenResponse) -> common.GenerateAccessTokenResponse:
         """Post-rpc interceptor for generate_access_token
 
-        Override in a subclass to manipulate the response
+        DEPRECATED. Please use the `post_generate_access_token_with_metadata`
+        interceptor instead.
+
+        Override in a subclass to read or manipulate the response
         after it is returned by the IAMCredentials server but before
-        it is returned to user code.
+        it is returned to user code. This `post_generate_access_token` interceptor runs
+        before the `post_generate_access_token_with_metadata` interceptor.
         """
         return response
-    def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.GenerateIdTokenRequest, Sequence[Tuple[str, str]]]:
+
+    def post_generate_access_token_with_metadata(self, response: common.GenerateAccessTokenResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateAccessTokenResponse, Sequence[Tuple[str, Union[str, bytes]]]]:
+        """Post-rpc interceptor for generate_access_token
+
+        Override in a subclass to read or manipulate the response or metadata after it
+        is returned by the IAMCredentials server but before it is returned to user code.
+
+        We recommend only using this `post_generate_access_token_with_metadata`
+        interceptor in new development instead of the `post_generate_access_token` interceptor.
+        When both interceptors are used, this `post_generate_access_token_with_metadata` interceptor runs after the
+        `post_generate_access_token` interceptor. The (possibly modified) response returned by
+        `post_generate_access_token` will be passed to
+        `post_generate_access_token_with_metadata`.
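+
+        A minimal sketch of an override (the subclass name and body are
+        illustrative only)::
+
+            class MetadataAwareInterceptor(IAMCredentialsRestInterceptor):
+                def post_generate_access_token_with_metadata(self, response, metadata):
+                    # Inspect response headers surfaced as metadata, then pass through.
+                    return response, metadata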
+ """ + return response, metadata + + def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateIdTokenRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for generate_id_token Override in a subclass to manipulate the request or metadata @@ -129,12 +157,32 @@ def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata def post_generate_id_token(self, response: common.GenerateIdTokenResponse) -> common.GenerateIdTokenResponse: """Post-rpc interceptor for generate_id_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_id_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IAMCredentials server but before - it is returned to user code. + it is returned to user code. This `post_generate_id_token` interceptor runs + before the `post_generate_id_token_with_metadata` interceptor. """ return response - def pre_sign_blob(self, request: common.SignBlobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.SignBlobRequest, Sequence[Tuple[str, str]]]: + + def post_generate_id_token_with_metadata(self, response: common.GenerateIdTokenResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateIdTokenResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for generate_id_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IAMCredentials server but before it is returned to user code. + + We recommend only using this `post_generate_id_token_with_metadata` + interceptor in new development instead of the `post_generate_id_token` interceptor. + When both interceptors are used, this `post_generate_id_token_with_metadata` interceptor runs after the + `post_generate_id_token` interceptor. The (possibly modified) response returned by + `post_generate_id_token` will be passed to + `post_generate_id_token_with_metadata`. + """ + return response, metadata + + def pre_sign_blob(self, request: common.SignBlobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignBlobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for sign_blob Override in a subclass to manipulate the request or metadata @@ -145,12 +193,32 @@ def pre_sign_blob(self, request: common.SignBlobRequest, metadata: Sequence[Tupl def post_sign_blob(self, response: common.SignBlobResponse) -> common.SignBlobResponse: """Post-rpc interceptor for sign_blob - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_sign_blob_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IAMCredentials server but before - it is returned to user code. + it is returned to user code. This `post_sign_blob` interceptor runs + before the `post_sign_blob_with_metadata` interceptor. 
""" return response - def pre_sign_jwt(self, request: common.SignJwtRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.SignJwtRequest, Sequence[Tuple[str, str]]]: + + def post_sign_blob_with_metadata(self, response: common.SignBlobResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignBlobResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for sign_blob + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IAMCredentials server but before it is returned to user code. + + We recommend only using this `post_sign_blob_with_metadata` + interceptor in new development instead of the `post_sign_blob` interceptor. + When both interceptors are used, this `post_sign_blob_with_metadata` interceptor runs after the + `post_sign_blob` interceptor. The (possibly modified) response returned by + `post_sign_blob` will be passed to + `post_sign_blob_with_metadata`. + """ + return response, metadata + + def pre_sign_jwt(self, request: common.SignJwtRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignJwtRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for sign_jwt Override in a subclass to manipulate the request or metadata @@ -161,12 +229,31 @@ def pre_sign_jwt(self, request: common.SignJwtRequest, metadata: Sequence[Tuple[ def post_sign_jwt(self, response: common.SignJwtResponse) -> common.SignJwtResponse: """Post-rpc interceptor for sign_jwt - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_sign_jwt_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IAMCredentials server but before - it is returned to user code. + it is returned to user code. This `post_sign_jwt` interceptor runs + before the `post_sign_jwt_with_metadata` interceptor. """ return response + def post_sign_jwt_with_metadata(self, response: common.SignJwtResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignJwtResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for sign_jwt + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IAMCredentials server but before it is returned to user code. + + We recommend only using this `post_sign_jwt_with_metadata` + interceptor in new development instead of the `post_sign_jwt` interceptor. + When both interceptors are used, this `post_sign_jwt_with_metadata` interceptor runs after the + `post_sign_jwt` interceptor. The (possibly modified) response returned by + `post_sign_jwt` will be passed to + `post_sign_jwt_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class IAMCredentialsRestStub: @@ -296,7 +383,7 @@ def __call__(self, request: common.GenerateAccessTokenRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> common.GenerateAccessTokenResponse: r"""Call the generate access token method over HTTP. @@ -306,8 +393,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
 
             Returns:
                 ~.common.GenerateAccessTokenResponse:
@@ -315,6 +404,7 @@ def __call__(self,
             """
 
             http_options = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_http_options()
+
             request, metadata = self._interceptor.pre_generate_access_token(request, metadata)
             transcoded_request = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_transcoded_request(http_options, request)
 
@@ -323,6 +413,29 @@ def __call__(self,
             # Jsonify the query params
             query_params = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_query_params_json(transcoded_request)
 
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.GenerateAccessToken",
+                    extra = {
+                        "serviceName": "google.iam.credentials.v1.IAMCredentials",
+                        "rpcName": "GenerateAccessToken",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
             # Send the request
             response = IAMCredentialsRestTransport._GenerateAccessToken._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
 
@@ -336,7 +449,29 @@ def __call__(self,
             pb_resp = common.GenerateAccessTokenResponse.pb(resp)
 
             json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
             resp = self._interceptor.post_generate_access_token(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_generate_access_token_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = common.GenerateAccessTokenResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.iam.credentials_v1.IAMCredentialsClient.generate_access_token",
+                    extra = {
+                        "serviceName": "google.iam.credentials.v1.IAMCredentials",
+                        "rpcName": "GenerateAccessToken",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
             return resp
 
     class _GenerateIdToken(_BaseIAMCredentialsRestTransport._BaseGenerateIdToken, IAMCredentialsRestStub):
@@ -370,7 +505,7 @@ def __call__(self,
                 request: common.GenerateIdTokenRequest, *,
                 retry: OptionalRetry=gapic_v1.method.DEFAULT,
                 timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
                 ) -> common.GenerateIdTokenResponse:
             r"""Call the generate id token method over HTTP.
 
@@ -380,8 +515,10 @@ def __call__(self,
                 retry (google.api_core.retry.Retry): Designation of what errors, if any,
                     should be retried.
                 timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
 
             Returns:
                 ~.common.GenerateIdTokenResponse:
@@ -389,6 +526,7 @@ def __call__(self,
             """
 
             http_options = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_http_options()
+
             request, metadata = self._interceptor.pre_generate_id_token(request, metadata)
             transcoded_request = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_transcoded_request(http_options, request)
 
@@ -397,6 +535,29 @@ def __call__(self,
             # Jsonify the query params
             query_params = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_query_params_json(transcoded_request)
 
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.GenerateIdToken",
+                    extra = {
+                        "serviceName": "google.iam.credentials.v1.IAMCredentials",
+                        "rpcName": "GenerateIdToken",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
             # Send the request
             response = IAMCredentialsRestTransport._GenerateIdToken._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
 
@@ -410,7 +571,29 @@ def __call__(self,
             pb_resp = common.GenerateIdTokenResponse.pb(resp)
 
             json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
             resp = self._interceptor.post_generate_id_token(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_generate_id_token_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = common.GenerateIdTokenResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.iam.credentials_v1.IAMCredentialsClient.generate_id_token",
+                    extra = {
+                        "serviceName": "google.iam.credentials.v1.IAMCredentials",
+                        "rpcName": "GenerateIdToken",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
             return resp
 
     class _SignBlob(_BaseIAMCredentialsRestTransport._BaseSignBlob, IAMCredentialsRestStub):
@@ -444,7 +627,7 @@ def __call__(self,
                 request: common.SignBlobRequest, *,
                 retry: OptionalRetry=gapic_v1.method.DEFAULT,
                 timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
                 ) -> common.SignBlobResponse:
             r"""Call the sign blob method over HTTP.
 
@@ -454,8 +637,10 @@ def __call__(self,
                 retry (google.api_core.retry.Retry): Designation of what errors, if any,
                     should be retried.
                 timeout (float): The timeout for this request.
class _SignBlob(_BaseIAMCredentialsRestTransport._BaseSignBlob, IAMCredentialsRestStub): @@ -444,7 +627,7 @@ def __call__(self, request: common.SignBlobRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> common.SignBlobResponse: r"""Call the sign blob method over HTTP. @@ -454,8 +637,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.common.SignBlobResponse: @@ -463,6 +648,7 @@ def __call__(self, """ http_options = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_http_options() + request, metadata = self._interceptor.pre_sign_blob(request, metadata) transcoded_request = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_transcoded_request(http_options, request) @@ -471,6 +657,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except Exception: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + "Sending request for google.iam.credentials_v1.IAMCredentialsClient.SignBlob", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": "SignBlob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = IAMCredentialsRestTransport._SignBlob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -484,7 +693,29 @@ def __call__(self, pb_resp = common.SignBlobResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_sign_blob(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_sign_blob_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = common.SignBlobResponse.to_json(resp) + except Exception: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam.credentials_v1.IAMCredentialsClient.sign_blob", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": "SignBlob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp
class _SignJwt(_BaseIAMCredentialsRestTransport._BaseSignJwt, IAMCredentialsRestStub): @@ -518,7 +749,7 @@ def __call__(self, request: common.SignJwtRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> common.SignJwtResponse: r"""Call the sign jwt method over HTTP. @@ -528,8 +759,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.common.SignJwtResponse: @@ -537,6 +770,7 @@ def __call__(self, """ http_options = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_http_options() + request, metadata = self._interceptor.pre_sign_jwt(request, metadata) transcoded_request = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_transcoded_request(http_options, request) @@ -545,6 +779,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except Exception: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + "Sending request for google.iam.credentials_v1.IAMCredentialsClient.SignJwt", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": "SignJwt", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = IAMCredentialsRestTransport._SignJwt._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -558,7 +815,29 @@ def __call__(self, pb_resp = common.SignJwtResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_sign_jwt(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_sign_jwt_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = common.SignJwtResponse.to_json(resp) + except Exception: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam.credentials_v1.IAMCredentialsClient.sign_jwt", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": "SignJwt", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp
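Every method above now runs through pre_*/post_* interceptor hooks, plus the new post_*_with_metadata variant that also receives the response headers as (key, str(value)) pairs. A minimal sketch of a custom interceptor wired into the REST transport, using only hook names that appear in this diff (Application Default Credentials are assumed for the transport; the class and header names are hypothetical):

from google.iam.credentials_v1.services.iam_credentials import IAMCredentialsClient, transports

class AuditingInterceptor(transports.IAMCredentialsRestInterceptor):
    def pre_sign_jwt(self, request, metadata):
        # Annotate the outgoing metadata before the HTTP request is sent.
        return request, list(metadata) + [("x-example-audit", "1")]

    def post_sign_jwt_with_metadata(self, response, metadata):
        # 'metadata' carries the HTTP response headers gathered by the transport.
        return response, metadata

transport = transports.IAMCredentialsRestTransport(interceptor=AuditingInterceptor())
client = IAMCredentialsClient(transport=transport)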
@property diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py index 3ffce85ffd..8d1bc46501 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py @@ -19,7 +19,6 @@ from google.protobuf import json_format from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO -from google.auth import credentials as ga_credentials # type: ignore import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -43,7 +42,7 @@ class _BaseIAMCredentialsRestTransport(IAMCredentialsTransport): def __init__(self, *, host: str = 'iamcredentials.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, + credentials: Optional[Any] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, url_scheme: str = 'https', @@ -53,7 +52,7 @@ def __init__(self, *, Args: host (Optional[str]): The hostname to connect to (default: 'iamcredentials.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The + credentials (Optional[Any]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the diff --git a/tests/integration/goldens/credentials/noxfile.py b/tests/integration/goldens/credentials/noxfile.py index 2194befe9c..3b9e7366bb 100755 --- a/tests/integration/goldens/credentials/noxfile.py +++ b/tests/integration/goldens/credentials/noxfile.py @@ -29,7 +29,8 @@ "3.9", "3.10", "3.11", - "3.12" + "3.12", + "3.13", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -39,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" +DEFAULT_PYTHON_VERSION = "3.13" nox.sessions = [ "unit", @@ -61,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -94,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -129,7 +130,8 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - "grpcio", + # Exclude grpcio==1.67.0rc1, which does not support python 3.13 + "grpcio!=1.67.0rc1", "grpcio-status", "protobuf", "proto-plus", diff --git a/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json b/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json index 317a7f4a2d..636249950f 100755 --- a/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json +++ b/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json @@ -59,7 +59,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.GenerateAccessTokenResponse", @@ -151,7 +151,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.GenerateAccessTokenResponse", @@ -244,7 +244,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type":
"Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.GenerateIdTokenResponse", @@ -336,7 +336,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.GenerateIdTokenResponse", @@ -425,7 +425,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.SignBlobResponse", @@ -513,7 +513,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.SignBlobResponse", @@ -602,7 +602,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.SignJwtResponse", @@ -690,7 +690,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.SignJwtResponse", diff --git a/tests/integration/goldens/credentials/setup.py b/tests/integration/goldens/credentials/setup.py index 28f72ad696..36e57a1705 100755 --- a/tests/integration/goldens/credentials/setup.py +++ b/tests/integration/goldens/credentials/setup.py @@ -44,8 +44,11 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] +extras = { +} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-iam-credentials" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -81,6 +84,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -88,6 +92,7 @@ packages=packages, python_requires=">=3.7", install_requires=dependencies, + extras_require=extras, include_package_data=True, zip_safe=False, ) diff --git a/tests/integration/goldens/credentials/testing/constraints-3.13.txt b/tests/integration/goldens/credentials/testing/constraints-3.13.txt new file mode 100755 index 0000000000..ed7f9aed25 --- /dev/null +++ b/tests/integration/goldens/credentials/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index aa4bb494fb..30d886331a 100755 --- a/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -23,7 +23,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -68,6 +68,13 @@ } CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data), chunk_size): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -184,6 +191,7 @@ def test__get_universe_domain(): IAMCredentialsClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." + @pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ (401, CRED_INFO_JSON, True), (403, CRED_INFO_JSON, True), @@ -283,7 +291,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name with pytest.raises(ValueError): client._compare_universes("foo.bar", None) - @pytest.mark.parametrize("client_class,transport_name", [ (IAMCredentialsClient, "grpc"), (IAMCredentialsAsyncClient, "grpc_asyncio"), @@ -853,25 +860,6 @@ def test_generate_access_token(request_type, transport: str = 'grpc'): assert response.access_token == 'access_token_value' -def test_generate_access_token_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.generate_access_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateAccessTokenRequest() - - def test_generate_access_token_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -931,29 +919,6 @@ def test_generate_access_token_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_generate_access_token_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: - # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse( - access_token='access_token_value', - )) - await client.generate_access_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateAccessTokenRequest() - - @pytest.mark.asyncio async def test_generate_access_token_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1024,7 +989,6 @@ async def test_generate_access_token_async(transport: str = 'grpc_asyncio', requ async def test_generate_access_token_async_from_dict(): await test_generate_access_token_async(request_type=dict) - def test_generate_access_token_field_headers(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1231,25 +1195,6 @@ def test_generate_id_token(request_type, transport: str = 'grpc'): assert response.token == 'token_value' -def test_generate_id_token_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.generate_id_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateIdTokenRequest() - - def test_generate_id_token_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1311,29 +1256,6 @@ def test_generate_id_token_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_generate_id_token_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse( - token='token_value', - )) - await client.generate_id_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateIdTokenRequest() - - @pytest.mark.asyncio async def test_generate_id_token_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1404,7 +1326,6 @@ async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_ async def test_generate_id_token_async_from_dict(): await test_generate_id_token_async(request_type=dict) - def test_generate_id_token_field_headers(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1617,25 +1538,6 @@ def test_sign_blob(request_type, transport: str = 'grpc'): assert response.signed_blob == b'signed_blob_blob' -def test_sign_blob_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.sign_blob() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.SignBlobRequest() - - def test_sign_blob_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1695,30 +1597,6 @@ def test_sign_blob_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_sign_blob_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse( - key_id='key_id_value', - signed_blob=b'signed_blob_blob', - )) - await client.sign_blob() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.SignBlobRequest() - - @pytest.mark.asyncio async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1791,7 +1669,6 @@ async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=com async def test_sign_blob_async_from_dict(): await test_sign_blob_async(request_type=dict) - def test_sign_blob_field_headers(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1994,25 +1871,6 @@ def test_sign_jwt(request_type, transport: str = 'grpc'): assert response.signed_jwt == 'signed_jwt_value' -def test_sign_jwt_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.sign_jwt() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.SignJwtRequest() - - def test_sign_jwt_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2074,30 +1932,6 @@ def test_sign_jwt_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_sign_jwt_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', - )) - await client.sign_jwt() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.SignJwtRequest() - - @pytest.mark.asyncio async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2170,7 +2004,6 @@ async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=comm async def test_sign_jwt_async_from_dict(): await test_sign_jwt_async(request_type=dict) - def test_sign_jwt_field_headers(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2336,42 +2169,6 @@ async def test_sign_jwt_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [ - common.GenerateAccessTokenRequest, - dict, -]) -def test_generate_access_token_rest(request_type): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = common.GenerateAccessTokenResponse( - access_token='access_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.GenerateAccessTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.generate_access_token(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common.GenerateAccessTokenResponse) - assert response.access_token == 'access_token_value' - def test_generate_access_token_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2472,6 +2269,7 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.generate_access_token(request) @@ -2488,66 +2286,6 @@ def test_generate_access_token_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "scope", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_generate_access_token_rest_interceptors(null_interceptor): - transport = transports.IAMCredentialsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), - ) - client = IAMCredentialsClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_access_token") as post, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_access_token") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = common.GenerateAccessTokenRequest.pb(common.GenerateAccessTokenRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common.GenerateAccessTokenResponse.to_json(common.GenerateAccessTokenResponse()) - - request = common.GenerateAccessTokenRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common.GenerateAccessTokenResponse() - - client.generate_access_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_generate_access_token_rest_bad_request(transport: str = 'rest', request_type=common.GenerateAccessTokenRequest): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.generate_access_token(request) - - def test_generate_access_token_rest_flattened(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2579,6 +2317,7 @@ def test_generate_access_token_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.generate_access_token(**mock_args) @@ -2607,69 +2346,26 @@ def test_generate_access_token_rest_flattened_error(transport: str = 'rest'): ) -def test_generate_access_token_rest_error(): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - +def test_generate_id_token_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -@pytest.mark.parametrize("request_type", [ - common.GenerateIdTokenRequest, - dict, -]) -def test_generate_id_token_rest(request_type): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) + # Ensure method has been cached + assert client._transport.generate_id_token in client._transport._wrapped_methods - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = common.GenerateIdTokenResponse( - token='token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.GenerateIdTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.generate_id_token(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common.GenerateIdTokenResponse) - assert response.token == 'token_value' - -def test_generate_id_token_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.generate_id_token in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.generate_id_token] = mock_rpc + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.generate_id_token] = mock_rpc request = {} client.generate_id_token(request) @@ -2750,6 +2446,7 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.generate_id_token(request) @@ -2766,66 +2463,6 @@ def test_generate_id_token_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "audience", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_generate_id_token_rest_interceptors(null_interceptor): - transport = transports.IAMCredentialsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), - ) - client = IAMCredentialsClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_id_token") as post, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_id_token") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = common.GenerateIdTokenRequest.pb(common.GenerateIdTokenRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common.GenerateIdTokenResponse.to_json(common.GenerateIdTokenResponse()) - - request = common.GenerateIdTokenRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common.GenerateIdTokenResponse() - - client.generate_id_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_generate_id_token_rest_bad_request(transport: str = 'rest', request_type=common.GenerateIdTokenRequest): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will 
satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.generate_id_token(request) - - def test_generate_id_token_rest_flattened(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2857,6 +2494,7 @@ def test_generate_id_token_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.generate_id_token(**mock_args) @@ -2885,51 +2523,6 @@ def test_generate_id_token_rest_flattened_error(transport: str = 'rest'): ) -def test_generate_id_token_rest_error(): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - common.SignBlobRequest, - dict, -]) -def test_sign_blob_rest(request_type): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = common.SignBlobResponse( - key_id='key_id_value', - signed_blob=b'signed_blob_blob', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.SignBlobResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.sign_blob(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common.SignBlobResponse) - assert response.key_id == 'key_id_value' - assert response.signed_blob == b'signed_blob_blob' - def test_sign_blob_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3030,6 +2623,7 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.sign_blob(request) @@ -3046,66 +2640,6 @@ def test_sign_blob_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "payload", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_sign_blob_rest_interceptors(null_interceptor): - transport = transports.IAMCredentialsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), - ) - client = IAMCredentialsClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_blob") as post, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_blob") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = common.SignBlobRequest.pb(common.SignBlobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common.SignBlobResponse.to_json(common.SignBlobResponse()) - - request = common.SignBlobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common.SignBlobResponse() - - client.sign_blob(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_sign_blob_rest_bad_request(transport: str = 'rest', request_type=common.SignBlobRequest): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.sign_blob(request) - - def test_sign_blob_rest_flattened(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3136,6 +2670,7 @@ def test_sign_blob_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.sign_blob(**mock_args) @@ -3163,51 +2698,6 @@ def test_sign_blob_rest_flattened_error(transport: str = 'rest'): ) -def test_sign_blob_rest_error(): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - common.SignJwtRequest, - dict, -]) -def test_sign_jwt_rest(request_type): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.SignJwtResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.sign_jwt(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common.SignJwtResponse) - assert response.key_id == 'key_id_value' - assert response.signed_jwt == 'signed_jwt_value' - def test_sign_jwt_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3308,6 +2798,7 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.sign_jwt(request) @@ -3317,27 +2808,688 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): assert expected_params == actual_params -def test_sign_jwt_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) +def test_sign_jwt_rest_unset_required_fields(): + transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.sign_jwt._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "payload", ))) + + +def test_sign_jwt_rest_flattened(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.SignJwtResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/serviceAccounts/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + delegates=['delegates_value'], + payload='payload_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common.SignJwtResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.sign_jwt(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/serviceAccounts/*}:signJwt" % client.transport._host, args[1]) + + +def test_sign_jwt_rest_flattened_error(transport: str = 'rest'): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.sign_jwt( + common.SignJwtRequest(), + name='name_value', + delegates=['delegates_value'], + payload='payload_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.IAMCredentialsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.IAMCredentialsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IAMCredentialsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.IAMCredentialsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = IAMCredentialsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = IAMCredentialsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.IAMCredentialsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IAMCredentialsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.IAMCredentialsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = IAMCredentialsClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.IAMCredentialsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.IAMCredentialsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.IAMCredentialsGrpcTransport, + transports.IAMCredentialsGrpcAsyncIOTransport, + transports.IAMCredentialsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = IAMCredentialsClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_access_token_empty_call_grpc(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + call.return_value = common.GenerateAccessTokenResponse() + client.generate_access_token(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.GenerateAccessTokenRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_id_token_empty_call_grpc(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + call.return_value = common.GenerateIdTokenResponse() + client.generate_id_token(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.GenerateIdTokenRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_sign_blob_empty_call_grpc(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + call.return_value = common.SignBlobResponse() + client.sign_blob(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.SignBlobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_sign_jwt_empty_call_grpc(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + call.return_value = common.SignJwtResponse() + client.sign_jwt(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.SignJwtRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = IAMCredentialsAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = IAMCredentialsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_generate_access_token_empty_call_grpc_asyncio(): + client = IAMCredentialsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse( + access_token='access_token_value', + )) + await client.generate_access_token(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.GenerateAccessTokenRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_generate_id_token_empty_call_grpc_asyncio(): + client = IAMCredentialsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse( + token='token_value', + )) + await client.generate_id_token(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.GenerateIdTokenRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_sign_blob_empty_call_grpc_asyncio(): + client = IAMCredentialsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse( + key_id='key_id_value', + signed_blob=b'signed_blob_blob', + )) + await client.sign_blob(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.SignBlobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_sign_jwt_empty_call_grpc_asyncio(): + client = IAMCredentialsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse( + key_id='key_id_value', + signed_jwt='signed_jwt_value', + )) + await client.sign_jwt(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.SignJwtRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = IAMCredentialsClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_generate_access_token_rest_bad_request(request_type=common.GenerateAccessTokenRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.generate_access_token(request) + + +@pytest.mark.parametrize("request_type", [ + common.GenerateAccessTokenRequest, + dict, +]) +def test_generate_access_token_rest_call_success(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.GenerateAccessTokenResponse( + access_token='access_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.generate_access_token(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.GenerateAccessTokenResponse) + assert response.access_token == 'access_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_access_token_rest_interceptors(null_interceptor): + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), + ) + client = IAMCredentialsClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_access_token") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_access_token_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_access_token") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = common.GenerateAccessTokenRequest.pb(common.GenerateAccessTokenRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = common.GenerateAccessTokenResponse.to_json(common.GenerateAccessTokenResponse()) + req.return_value.content = return_value + + request = common.GenerateAccessTokenRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.GenerateAccessTokenResponse() + post_with_metadata.return_value = common.GenerateAccessTokenResponse(), metadata + + client.generate_access_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_generate_id_token_rest_bad_request(request_type=common.GenerateIdTokenRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.generate_id_token(request) + + +@pytest.mark.parametrize("request_type", [ + common.GenerateIdTokenRequest, + dict, +]) +def test_generate_id_token_rest_call_success(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
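# A hedged sketch of the hook pair the interceptor test above exercises:
# applications subclass the generated REST interceptor and override the
# pre/post hooks. Hook names and return shapes are taken from the test; the
# import paths assume the usual generated-package layout, and the bodies are
# illustrative only.
from google.auth import credentials as ga_credentials
from google.cloud.iam_credentials_v1 import IAMCredentialsClient
from google.cloud.iam_credentials_v1.services.iam_credentials import transports


class AuditInterceptor(transports.IAMCredentialsRestInterceptor):
    def pre_generate_access_token(self, request, metadata):
        # Runs before transcoding; must return (request, metadata).
        return request, metadata

    def post_generate_access_token_with_metadata(self, response, metadata):
        # Runs after deserialization; must return (response, metadata).
        return response, metadata


client = IAMCredentialsClient(
    transport=transports.IAMCredentialsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=AuditInterceptor(),
    )
)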
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.GenerateIdTokenResponse( + token='token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.GenerateIdTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.generate_id_token(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common.GenerateIdTokenResponse) + assert response.token == 'token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_id_token_rest_interceptors(null_interceptor): + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), + ) + client = IAMCredentialsClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_id_token") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_id_token_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_id_token") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = common.GenerateIdTokenRequest.pb(common.GenerateIdTokenRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = common.GenerateIdTokenResponse.to_json(common.GenerateIdTokenResponse()) + req.return_value.content = return_value + + request = common.GenerateIdTokenRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.GenerateIdTokenResponse() + post_with_metadata.return_value = common.GenerateIdTokenResponse(), metadata + + client.generate_id_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_sign_blob_rest_bad_request(request_type=common.SignBlobRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.sign_blob(request) + + +@pytest.mark.parametrize("request_type", [ + common.SignBlobRequest, + dict, +]) +def test_sign_blob_rest_call_success(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.SignBlobResponse( + key_id='key_id_value', + signed_blob=b'signed_blob_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.SignBlobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.sign_blob(request) - unset_fields = transport.sign_jwt._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "payload", ))) + # Establish that the response is the type that we expect. 
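# Why the call-success tests above convert through .pb() before JSON-encoding:
# json_format operates on raw protobuf messages, while the generated types are
# proto-plus wrappers. A standalone round-trip sketch (assumes the generated
# google.cloud.iam_credentials_v1 package is importable):
from google.protobuf import json_format

from google.cloud.iam_credentials_v1.types import common

wrapped = common.SignBlobResponse(key_id='key_id_value', signed_blob=b'signed_blob_blob')
payload = json_format.MessageToJson(common.SignBlobResponse.pb(wrapped))
assert common.SignBlobResponse.from_json(payload) == wrapped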
+ assert isinstance(response, common.SignBlobResponse) + assert response.key_id == 'key_id_value' + assert response.signed_blob == b'signed_blob_blob' @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_sign_jwt_rest_interceptors(null_interceptor): +def test_sign_blob_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), ) client = IAMCredentialsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_jwt") as post, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_jwt") as pre: + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_blob") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_blob_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_blob") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = common.SignJwtRequest.pb(common.SignJwtRequest()) + post_with_metadata.assert_not_called() + pb_message = common.SignBlobRequest.pb(common.SignBlobRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3345,31 +3497,33 @@ def test_sign_jwt_rest_interceptors(null_interceptor): "query_params": pb_message, } - req.return_value = Response() + req.return_value = mock.Mock() req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common.SignJwtResponse.to_json(common.SignJwtResponse()) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = common.SignBlobResponse.to_json(common.SignBlobResponse()) + req.return_value.content = return_value - request = common.SignJwtRequest() + request = common.SignBlobRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = common.SignJwtResponse() + post.return_value = common.SignBlobResponse() + post_with_metadata.return_value = common.SignBlobResponse(), metadata - client.sign_jwt(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.sign_blob(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() -def test_sign_jwt_rest_bad_request(transport: str = 'rest', request_type=common.SignJwtRequest): +def test_sign_jwt_rest_bad_request(request_type=common.SignJwtRequest): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest" ) - # send a request that will satisfy transcoding request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} request = request_type(**request_init) @@ -3377,183 +3531,195 @@ def test_sign_jwt_rest_bad_request(transport: str = 'rest', request_type=common. # Mock the http request call within the method and fake a BadRequest error. 
with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = Request() + response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.sign_jwt(request) -def test_sign_jwt_rest_flattened(): +@pytest.mark.parametrize("request_type", [ + common.SignJwtRequest, + dict, +]) +def test_sign_jwt_rest_call_success(request_type): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="rest" ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = common.SignJwtResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/serviceAccounts/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + return_value = common.SignJwtResponse( + key_id='key_id_value', + signed_jwt='signed_jwt_value', ) - mock_args.update(sample_request) # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() response_value.status_code = 200 + # Convert return value to protobuf type return_value = common.SignJwtResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.sign_jwt(request) - client.sign_jwt(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/serviceAccounts/*}:signJwt" % client.transport._host, args[1]) + # Establish that the response is the type that we expect. + assert isinstance(response, common.SignJwtResponse) + assert response.key_id == 'key_id_value' + assert response.signed_jwt == 'signed_jwt_value' -def test_sign_jwt_rest_flattened_error(transport: str = 'rest'): - client = IAMCredentialsClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_sign_jwt_rest_interceptors(null_interceptor): + transport = transports.IAMCredentialsRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
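# Why every REST test seeds {'name': 'projects/sample1/serviceAccounts/sample2'}:
# the field has to match the HTTP rule's path template, or transcoding fails
# before any request goes out. A standalone check with the same api_core helper
# the removed flattened test used:
from google.api_core import path_template

assert path_template.validate(
    "v1/{name=projects/*/serviceAccounts/*}:signJwt",
    "v1/projects/sample1/serviceAccounts/sample2:signJwt",
)
assert not path_template.validate(
    "v1/{name=projects/*/serviceAccounts/*}:signJwt",
    "v1/projects/sample1:signJwt",  # missing the serviceAccounts segment
)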
- with pytest.raises(ValueError): - client.sign_jwt( - common.SignJwtRequest(), - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), ) + client = IAMCredentialsClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_jwt") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_jwt_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_jwt") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = common.SignJwtRequest.pb(common.SignJwtRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = common.SignJwtResponse.to_json(common.SignJwtResponse()) + req.return_value.content = return_value + + request = common.SignJwtRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.SignJwtResponse() + post_with_metadata.return_value = common.SignJwtResponse(), metadata + + client.sign_jwt(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_sign_jwt_rest_error(): +def test_initialize_client_w_rest(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport="rest" ) + assert client is not None -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.IAMCredentialsGrpcTransport( +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_access_token_empty_call_rest(): + client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.IAMCredentialsGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = IAMCredentialsClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + client.generate_access_token(request=None) - # It is an error to provide an api_key and a transport instance. - transport = transports.IAMCredentialsGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = IAMCredentialsClient( - client_options=options, - transport=transport, - ) + # Establish that the underlying stub method was called. 
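# The `_, args, _ = call.mock_calls[0]` idiom used by these empty-call tests
# unpacks a mock call record into its (name, args, kwargs) triple; direct calls
# are recorded under the empty name. A standalone sketch:
from unittest import mock

m = mock.Mock()
m("request-object", retry=None)
name, args, kwargs = m.mock_calls[0]
assert name == ''
assert args == ("request-object",)
assert kwargs == {"retry": None}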
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.GenerateAccessTokenRequest() - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = IAMCredentialsClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) + assert args[0] == request_msg - # It is an error to provide scopes and a transport instance. - transport = transports.IAMCredentialsGrpcTransport( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_id_token_empty_call_rest(): + client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = IAMCredentialsClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + client.generate_id_token(request=None) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.IAMCredentialsGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = IAMCredentialsClient(transport=transport) - assert client.transport is transport + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.GenerateIdTokenRequest() -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.IAMCredentialsGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + assert args[0] == request_msg - transport = transports.IAMCredentialsGrpcAsyncIOTransport( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_sign_blob_empty_call_rest(): + client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel -@pytest.mark.parametrize("transport_class", [ - transports.IAMCredentialsGrpcTransport, - transports.IAMCredentialsGrpcAsyncIOTransport, - transports.IAMCredentialsRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + client.sign_blob(request=None) -def test_transport_kind_grpc(): - transport = IAMCredentialsClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.SignBlobRequest() + assert args[0] == request_msg -def test_transport_kind_grpc_asyncio(): - transport = IAMCredentialsAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_sign_jwt_empty_call_rest(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == "grpc_asyncio" + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + client.sign_jwt(request=None) -def test_transport_kind_rest(): - transport = IAMCredentialsClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.SignJwtRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): @@ -4075,33 +4241,40 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) + +def test_transport_close_grpc(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_transport_close_grpc_asyncio(): client = IAMCredentialsAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: async with client: close.assert_not_called() close.assert_called_once() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } +def test_transport_close_rest(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() def test_client_ctx(): transports = [ diff --git a/tests/integration/goldens/eventarc/docs/index.rst b/tests/integration/goldens/eventarc/docs/index.rst index cd50176117..0a20636626 100755 --- a/tests/integration/goldens/eventarc/docs/index.rst +++ b/tests/integration/goldens/eventarc/docs/index.rst @@ -3,5 +3,5 @@ API Reference .. toctree:: :maxdepth: 2 - eventarc_v1/services - eventarc_v1/types + eventarc_v1/services_ + eventarc_v1/types_ diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 85e32e4989..501680371f 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
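# The transport-close tests above pin down the client's context-manager
# contract: entering the with-block yields the client, and exiting closes the
# underlying resource exactly once ("grpc"/"grpc_asyncio": the channel,
# "rest": the HTTP session). Typical usage, as a sketch:
from google.auth import credentials as ga_credentials
from google.cloud.iam_credentials_v1 import IAMCredentialsClient

with IAMCredentialsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest") as client:
    pass  # the REST session is closed when this block exits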
# +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -55,6 +56,13 @@ from .transports.grpc_asyncio import EventarcGrpcAsyncIOTransport from .client import EventarcClient +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class EventarcAsyncClient: """Eventarc allows users to subscribe to various events that are @@ -265,13 +273,27 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.eventarc_v1.EventarcAsyncClient`.", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "credentialsType": None, + } + ) + async def get_trigger(self, request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> trigger.Trigger: r"""Get a single trigger. @@ -315,8 +337,10 @@ async def sample_get_trigger(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.Trigger: @@ -374,7 +398,7 @@ async def list_triggers(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTriggersAsyncPager: r"""List triggers. @@ -419,8 +443,10 @@ async def sample_list_triggers(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager: @@ -493,7 +519,7 @@ async def create_trigger(self, trigger_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Create a new trigger in a particular project and location. @@ -564,8 +590,10 @@ async def sample_create_trigger(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -640,7 +668,7 @@ async def update_trigger(self, allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Update a single trigger. @@ -703,8 +731,10 @@ async def sample_update_trigger(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -778,7 +808,7 @@ async def delete_trigger(self, allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Delete a single trigger. @@ -835,8 +865,10 @@ async def sample_delete_trigger(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -907,7 +939,7 @@ async def get_channel(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> channel.Channel: r"""Get a single Channel. 
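# The metadata type widened throughout these signatures,
# Sequence[Tuple[str, Union[str, bytes]]], exists for gRPC binary metadata:
# values for keys ending in "-bin" are bytes and the transport handles their
# wire encoding, while all other values remain str. An illustrative call
# (the resource name and the custom "-bin" key are placeholders):
def get_trigger_with_binary_metadata(client):
    return client.get_trigger(
        name="projects/my-project/locations/us-central1/triggers/my-trigger",
        metadata=[
            ("x-goog-request-params", "name=projects/my-project"),  # str value
            ("x-debug-trace-bin", b"\x0a\x0b"),  # bytes value for a "-bin" key
        ],
    )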
@@ -951,8 +983,10 @@ async def sample_get_channel(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.Channel: @@ -1016,7 +1050,7 @@ async def list_channels(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListChannelsAsyncPager: r"""List channels. @@ -1061,8 +1095,10 @@ async def sample_list_channels(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsAsyncPager: @@ -1135,7 +1171,7 @@ async def create_channel(self, channel_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Create a new channel in a particular project and location. @@ -1203,8 +1239,10 @@ async def sample_create_channel(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1281,7 +1319,7 @@ async def update_channel(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Update a single channel. @@ -1336,8 +1374,10 @@ async def sample_update_channel(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1411,7 +1451,7 @@ async def delete_channel(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Delete a single channel. @@ -1460,8 +1500,10 @@ async def sample_delete_channel(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1533,7 +1575,7 @@ async def get_provider(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> discovery.Provider: r"""Get a single Provider. @@ -1577,8 +1619,10 @@ async def sample_get_provider(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.Provider: @@ -1636,7 +1680,7 @@ async def list_providers(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListProvidersAsyncPager: r"""List providers. @@ -1681,8 +1725,10 @@ async def sample_list_providers(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersAsyncPager: @@ -1753,7 +1799,7 @@ async def get_channel_connection(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> channel_connection.ChannelConnection: r"""Get a single ChannelConnection. @@ -1797,8 +1843,10 @@ async def sample_get_channel_connection(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.ChannelConnection: @@ -1861,7 +1909,7 @@ async def list_channel_connections(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListChannelConnectionsAsyncPager: r"""List channel connections. @@ -1906,8 +1954,10 @@ async def sample_list_channel_connections(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsAsyncPager: @@ -1981,7 +2031,7 @@ async def create_channel_connection(self, channel_connection_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Create a new ChannelConnection in a particular project and location. @@ -2050,8 +2100,10 @@ async def sample_create_channel_connection(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation_async.AsyncOperation: @@ -2126,7 +2178,7 @@ async def delete_channel_connection(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Delete a single ChannelConnection. @@ -2174,8 +2226,10 @@ async def sample_delete_channel_connection(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -2246,7 +2300,7 @@ async def get_google_channel_config(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> google_channel_config.GoogleChannelConfig: r"""Get a GoogleChannelConfig @@ -2290,8 +2344,10 @@ async def sample_get_google_channel_config(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.GoogleChannelConfig: @@ -2356,7 +2412,7 @@ async def update_google_channel_config(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gce_google_channel_config.GoogleChannelConfig: r"""Update a single GoogleChannelConfig @@ -2410,8 +2466,10 @@ async def sample_update_google_channel_config(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.eventarc_v1.types.GoogleChannelConfig: @@ -2477,7 +2535,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -2488,8 +2546,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -2502,11 +2562,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -2531,7 +2587,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -2542,8 +2598,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -2556,11 +2614,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2585,7 +2639,7 @@ async def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -2601,8 +2655,10 @@ async def delete_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2614,11 +2670,7 @@ async def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2639,7 +2691,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -2654,8 +2706,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2667,11 +2721,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2692,7 +2742,7 @@ async def set_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -2705,8 +2755,10 @@ async def set_iam_policy( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -2783,11 +2835,7 @@ async def set_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -2812,7 +2860,7 @@ async def get_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -2826,8 +2874,10 @@ async def get_iam_policy( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -2904,11 +2954,7 @@ async def get_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -2933,7 +2979,7 @@ async def test_iam_permissions( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. @@ -2948,8 +2994,10 @@ async def test_iam_permissions( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. @@ -2963,11 +3011,7 @@ async def test_iam_permissions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. 
@@ -2992,7 +3036,7 @@ async def get_location( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -3003,8 +3047,10 @@ async def get_location( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.Location: Location object. @@ -3017,11 +3063,7 @@ async def get_location( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. @@ -3046,7 +3088,7 @@ async def list_locations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -3057,8 +3099,10 @@ async def list_locations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. @@ -3071,11 +3115,7 @@ async def list_locations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. 
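# The recurring edit in the mixin methods above swaps per-call wrapping via
# gapic_v1.method_async.wrap_method(...) for a lookup into the transport's
# precomputed _wrapped_methods table, so retry/timeout/client-info wrapping is
# applied once at transport construction instead of on every invocation.
# Rough shape of the two spellings (transport stands for an existing
# Eventarc transport instance; illustrative only):
from google.api_core import gapic_v1


def resolve_get_operation(transport):
    # Before: wrap the raw stub each time the mixin method runs.
    before = gapic_v1.method_async.wrap_method(
        transport.get_operation,
        default_timeout=None,
        client_info=gapic_v1.client_info.DEFAULT_CLIENT_INFO,
    )
    # After: reuse the wrapper built when the transport was created.
    after = transport._wrapped_methods[transport.get_operation]
    return before, after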
diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index aa4e14e52e..3b452908a8 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -16,6 +16,7 @@ from collections import OrderedDict from http import HTTPStatus import json +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast @@ -38,6 +39,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.eventarc_v1.services.eventarc import pagers @@ -507,33 +516,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = EventarcClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -543,9 +525,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - EventarcClient._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True def _add_cred_info_for_auth_errors( self, @@ -660,6 +642,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. 
+ client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -712,13 +698,28 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.eventarc_v1.EventarcClient`.", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "credentialsType": None, + } + ) + def get_trigger(self, request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> trigger.Trigger: r"""Get a single trigger. @@ -762,8 +763,10 @@ def sample_get_trigger(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.Trigger: @@ -792,7 +795,7 @@ def sample_get_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_trigger] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -820,7 +823,7 @@ def list_triggers(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTriggersPager: r"""List triggers. @@ -865,8 +868,10 @@ def sample_list_triggers(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager: @@ -897,7 +902,7 @@ def sample_list_triggers(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.list_triggers] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -938,7 +943,7 @@ def create_trigger(self, trigger_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Create a new trigger in a particular project and location. @@ -1009,8 +1014,10 @@ def sample_create_trigger(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1046,7 +1053,7 @@ def sample_create_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_trigger] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1084,7 +1091,7 @@ def update_trigger(self, allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Update a single trigger. @@ -1147,8 +1154,10 @@ def sample_update_trigger(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1184,7 +1193,7 @@ def sample_update_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_trigger] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1221,7 +1230,7 @@ def delete_trigger(self, allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Delete a single trigger. @@ -1278,8 +1287,10 @@ def sample_delete_trigger(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1313,7 +1324,7 @@ def sample_delete_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_trigger] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1349,7 +1360,7 @@ def get_channel(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> channel.Channel: r"""Get a single Channel. @@ -1393,8 +1404,10 @@ def sample_get_channel(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.Channel: @@ -1429,7 +1442,7 @@ def sample_get_channel(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_channel] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1457,7 +1470,7 @@ def list_channels(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListChannelsPager: r"""List channels. @@ -1502,8 +1515,10 @@ def sample_list_channels(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsPager: @@ -1534,7 +1549,7 @@ def sample_list_channels(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_channels] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
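The constructor change earlier in this file emits a structured DEBUG record (serviceName, universeDomain, credentialsType) when `google.api_core.client_logging` is importable. One way to surface those records, assuming logger names follow the module paths as the `std_logging.getLogger(__name__)` calls suggest:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    # Narrow DEBUG to the generated package rather than enabling it globally.
    logging.getLogger("google.cloud.eventarc_v1").setLevel(logging.DEBUG)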
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1575,7 +1590,7 @@ def create_channel(self, channel_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Create a new channel in a particular project and location. @@ -1643,8 +1658,10 @@ def sample_create_channel(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1683,7 +1700,7 @@ def sample_create_channel(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_channel_] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1720,7 +1737,7 @@ def update_channel(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Update a single channel. @@ -1775,8 +1792,10 @@ def sample_update_channel(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1813,7 +1832,7 @@ def sample_update_channel(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_channel] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1849,7 +1868,7 @@ def delete_channel(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Delete a single channel. @@ -1898,8 +1917,10 @@ def sample_delete_channel(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1934,7 +1955,7 @@ def sample_delete_channel(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_channel] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1970,7 +1991,7 @@ def get_provider(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> discovery.Provider: r"""Get a single Provider. @@ -2014,8 +2035,10 @@ def sample_get_provider(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.Provider: @@ -2044,7 +2067,7 @@ def sample_get_provider(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_provider] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2072,7 +2095,7 @@ def list_providers(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListProvidersPager: r"""List providers. @@ -2117,8 +2140,10 @@ def sample_list_providers(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersPager: @@ -2149,7 +2174,7 @@ def sample_list_providers(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_providers] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
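Throughout these signatures, `timeout` is annotated `Union[float, object]` because `gapic_v1.method.DEFAULT` is a plain sentinel object meaning "use the default configured on the wrapped method". A sketch of the sentinel test; the 60-second fallback is purely illustrative:

    from google.api_core import gapic_v1

    def call(timeout: "float | object" = gapic_v1.method.DEFAULT):
        if timeout is gapic_v1.method.DEFAULT:
            return 60.0  # illustrative; real defaults live on the wrapped method
        return timeout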
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2188,7 +2213,7 @@ def get_channel_connection(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> channel_connection.ChannelConnection: r"""Get a single ChannelConnection. @@ -2232,8 +2257,10 @@ def sample_get_channel_connection(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.ChannelConnection: @@ -2267,7 +2294,7 @@ def sample_get_channel_connection(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_channel_connection] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2295,7 +2322,7 @@ def list_channel_connections(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListChannelConnectionsPager: r"""List channel connections. @@ -2340,8 +2367,10 @@ def sample_list_channel_connections(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsPager: @@ -2373,7 +2402,7 @@ def sample_list_channel_connections(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_channel_connections] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2414,7 +2443,7 @@ def create_channel_connection(self, channel_connection_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Create a new ChannelConnection in a particular project and location. @@ -2483,8 +2512,10 @@ def sample_create_channel_connection(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2522,7 +2553,7 @@ def sample_create_channel_connection(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_channel_connection] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2558,7 +2589,7 @@ def delete_channel_connection(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Delete a single ChannelConnection. @@ -2606,8 +2637,10 @@ def sample_delete_channel_connection(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2641,7 +2674,7 @@ def sample_delete_channel_connection(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_channel_connection] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2677,7 +2710,7 @@ def get_google_channel_config(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> google_channel_config.GoogleChannelConfig: r"""Get a GoogleChannelConfig @@ -2721,8 +2754,10 @@ def sample_get_google_channel_config(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.GoogleChannelConfig: @@ -2757,7 +2792,7 @@ def sample_get_google_channel_config(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.get_google_channel_config] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2786,7 +2821,7 @@ def update_google_channel_config(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gce_google_channel_config.GoogleChannelConfig: r"""Update a single GoogleChannelConfig @@ -2840,8 +2875,10 @@ def sample_update_google_channel_config(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.GoogleChannelConfig: @@ -2878,7 +2915,7 @@ def sample_update_google_channel_config(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_google_channel_config] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2919,7 +2956,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -2930,8 +2967,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -2944,11 +2983,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. 
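The recurring "certain fields should be provided within the metadata header" step is `gapic_v1.routing_header.to_grpc_metadata`, which folds the request's routing fields into a single `x-goog-request-params` entry appended to the caller's metadata. A small sketch (URL-encoding details vary by api-core version):

    from google.api_core import gapic_v1

    entry = gapic_v1.routing_header.to_grpc_metadata(
        (("name", "projects/p/locations/l/channels/c"),)
    )
    # entry is roughly ('x-goog-request-params', 'name=projects/p/locations/l/channels/c')
    metadata = () + (entry,)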
@@ -2977,7 +3012,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -2988,8 +3023,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -3002,11 +3039,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -3035,7 +3068,7 @@ def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -3051,8 +3084,10 @@ def delete_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -3064,11 +3099,7 @@ def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -3089,7 +3120,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -3104,8 +3135,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -3117,11 +3150,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -3142,7 +3171,7 @@ def set_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -3155,8 +3184,10 @@ def set_iam_policy( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -3233,11 +3264,7 @@ def set_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -3266,7 +3293,7 @@ def get_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -3280,8 +3307,10 @@ def get_iam_policy( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -3358,11 +3387,7 @@ def get_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. 
@@ -3391,7 +3416,7 @@ def test_iam_permissions( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. @@ -3406,8 +3431,10 @@ def test_iam_permissions( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. @@ -3421,11 +3448,7 @@ def test_iam_permissions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. @@ -3454,7 +3477,7 @@ def get_location( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -3465,8 +3488,10 @@ def get_location( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.Location: Location object. @@ -3479,11 +3504,7 @@ def get_location( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. @@ -3512,7 +3533,7 @@ def list_locations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -3523,8 +3544,10 @@ def list_locations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. @@ -3537,11 +3560,7 @@ def list_locations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py index 9ef97e16e5..3acf89e71b 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -55,7 +55,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -68,8 +68,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = eventarc.ListTriggersRequest(request) @@ -121,7 +123,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -134,8 +136,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = eventarc.ListTriggersRequest(request) @@ -190,7 +194,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -203,8 +207,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = eventarc.ListChannelsRequest(request) @@ -256,7 +262,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -269,8 +275,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = eventarc.ListChannelsRequest(request) @@ -325,7 +333,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -338,8 +346,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = eventarc.ListProvidersRequest(request) @@ -391,7 +401,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -404,8 +414,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = eventarc.ListProvidersRequest(request) @@ -460,7 +472,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. 
Args: @@ -473,8 +485,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = eventarc.ListChannelConnectionsRequest(request) @@ -526,7 +540,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -539,8 +553,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = eventarc.ListChannelConnectionsRequest(request) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 86ab6167d4..4ad308359a 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -220,6 +220,51 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py 
b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 54dc681435..8c602c7fd8 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,8 +25,11 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.eventarc_v1.types import channel from google.cloud.eventarc_v1.types import channel_connection @@ -38,6 +44,74 @@ from google.longrunning import operations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC response metadata to a dict of str values + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class
EventarcGrpcTransport(EventarcTransport): """gRPC backend transport for Eventarc. @@ -191,7 +265,10 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -255,7 +332,7 @@ def operations_client(self) -> operations_v1.OperationsClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -280,7 +357,7 @@ def get_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_trigger' not in self._stubs: - self._stubs['get_trigger'] = self.grpc_channel.unary_unary( + self._stubs['get_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetTrigger', request_serializer=eventarc.GetTriggerRequest.serialize, response_deserializer=trigger.Trigger.deserialize, @@ -306,7 +383,7 @@ def list_triggers(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_triggers' not in self._stubs: - self._stubs['list_triggers'] = self.grpc_channel.unary_unary( + self._stubs['list_triggers'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListTriggers', request_serializer=eventarc.ListTriggersRequest.serialize, response_deserializer=eventarc.ListTriggersResponse.deserialize, @@ -333,7 +410,7 @@ def create_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_trigger' not in self._stubs: - self._stubs['create_trigger'] = self.grpc_channel.unary_unary( + self._stubs['create_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/CreateTrigger', request_serializer=eventarc.CreateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -359,7 +436,7 @@ def update_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_trigger' not in self._stubs: - self._stubs['update_trigger'] = self.grpc_channel.unary_unary( + self._stubs['update_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/UpdateTrigger', request_serializer=eventarc.UpdateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -385,7 +462,7 @@ def delete_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_trigger' not in self._stubs: - self._stubs['delete_trigger'] = self.grpc_channel.unary_unary( + self._stubs['delete_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/DeleteTrigger', request_serializer=eventarc.DeleteTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -411,7 +488,7 @@ def get_channel(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'get_channel' not in self._stubs: - self._stubs['get_channel'] = self.grpc_channel.unary_unary( + self._stubs['get_channel'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetChannel', request_serializer=eventarc.GetChannelRequest.serialize, response_deserializer=channel.Channel.deserialize, @@ -437,7 +514,7 @@ def list_channels(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_channels' not in self._stubs: - self._stubs['list_channels'] = self.grpc_channel.unary_unary( + self._stubs['list_channels'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListChannels', request_serializer=eventarc.ListChannelsRequest.serialize, response_deserializer=eventarc.ListChannelsResponse.deserialize, @@ -464,7 +541,7 @@ def create_channel_(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_channel_' not in self._stubs: - self._stubs['create_channel_'] = self.grpc_channel.unary_unary( + self._stubs['create_channel_'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/CreateChannel', request_serializer=eventarc.CreateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -490,7 +567,7 @@ def update_channel(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_channel' not in self._stubs: - self._stubs['update_channel'] = self.grpc_channel.unary_unary( + self._stubs['update_channel'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/UpdateChannel', request_serializer=eventarc.UpdateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -516,7 +593,7 @@ def delete_channel(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_channel' not in self._stubs: - self._stubs['delete_channel'] = self.grpc_channel.unary_unary( + self._stubs['delete_channel'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/DeleteChannel', request_serializer=eventarc.DeleteChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -542,7 +619,7 @@ def get_provider(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_provider' not in self._stubs: - self._stubs['get_provider'] = self.grpc_channel.unary_unary( + self._stubs['get_provider'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetProvider', request_serializer=eventarc.GetProviderRequest.serialize, response_deserializer=discovery.Provider.deserialize, @@ -568,7 +645,7 @@ def list_providers(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_providers' not in self._stubs: - self._stubs['list_providers'] = self.grpc_channel.unary_unary( + self._stubs['list_providers'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListProviders', request_serializer=eventarc.ListProvidersRequest.serialize, response_deserializer=eventarc.ListProvidersResponse.deserialize, @@ -594,7 +671,7 @@ def get_channel_connection(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'get_channel_connection' not in self._stubs: - self._stubs['get_channel_connection'] = self.grpc_channel.unary_unary( + self._stubs['get_channel_connection'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetChannelConnection', request_serializer=eventarc.GetChannelConnectionRequest.serialize, response_deserializer=channel_connection.ChannelConnection.deserialize, @@ -620,7 +697,7 @@ def list_channel_connections(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_channel_connections' not in self._stubs: - self._stubs['list_channel_connections'] = self.grpc_channel.unary_unary( + self._stubs['list_channel_connections'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListChannelConnections', request_serializer=eventarc.ListChannelConnectionsRequest.serialize, response_deserializer=eventarc.ListChannelConnectionsResponse.deserialize, @@ -647,7 +724,7 @@ def create_channel_connection(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_channel_connection' not in self._stubs: - self._stubs['create_channel_connection'] = self.grpc_channel.unary_unary( + self._stubs['create_channel_connection'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection', request_serializer=eventarc.CreateChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -673,7 +750,7 @@ def delete_channel_connection(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_channel_connection' not in self._stubs: - self._stubs['delete_channel_connection'] = self.grpc_channel.unary_unary( + self._stubs['delete_channel_connection'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection', request_serializer=eventarc.DeleteChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -699,7 +776,7 @@ def get_google_channel_config(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_google_channel_config' not in self._stubs: - self._stubs['get_google_channel_config'] = self.grpc_channel.unary_unary( + self._stubs['get_google_channel_config'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig', request_serializer=eventarc.GetGoogleChannelConfigRequest.serialize, response_deserializer=google_channel_config.GoogleChannelConfig.deserialize, @@ -725,7 +802,7 @@ def update_google_channel_config(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_google_channel_config' not in self._stubs: - self._stubs['update_google_channel_config'] = self.grpc_channel.unary_unary( + self._stubs['update_google_channel_config'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig', request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, @@ -733,7 +810,7 @@ def update_google_channel_config(self) -> Callable[ return self._stubs['update_google_channel_config'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def delete_operation( @@ -746,7 +823,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -764,7 +841,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -782,7 +859,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -800,7 +877,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -818,7 +895,7 @@ def list_locations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -836,7 +913,7 @@ def get_location( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, @@ -861,7 +938,7 @@ def set_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -887,7 +964,7 @@ def get_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -915,7 +992,7 @@ def test_iam_permissions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 3dbf4ebfe6..38009d7f90 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -24,8 +27,11 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.eventarc_v1.types import channel @@ -42,6 +48,73 @@ from .base import EventarcTransport, DEFAULT_CLIENT_INFO from .grpc import EventarcGrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class EventarcGrpcAsyncIOTransport(EventarcTransport): """gRPC AsyncIO backend transport for Eventarc. @@ -237,8 +310,11 @@ def __init__(self, *, ], ) - # Wrap messages. 
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -261,7 +337,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -286,7 +362,7 @@ def get_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_trigger' not in self._stubs: - self._stubs['get_trigger'] = self.grpc_channel.unary_unary( + self._stubs['get_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetTrigger', request_serializer=eventarc.GetTriggerRequest.serialize, response_deserializer=trigger.Trigger.deserialize, @@ -312,7 +388,7 @@ def list_triggers(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_triggers' not in self._stubs: - self._stubs['list_triggers'] = self.grpc_channel.unary_unary( + self._stubs['list_triggers'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListTriggers', request_serializer=eventarc.ListTriggersRequest.serialize, response_deserializer=eventarc.ListTriggersResponse.deserialize, @@ -339,7 +415,7 @@ def create_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_trigger' not in self._stubs: - self._stubs['create_trigger'] = self.grpc_channel.unary_unary( + self._stubs['create_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/CreateTrigger', request_serializer=eventarc.CreateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -365,7 +441,7 @@ def update_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_trigger' not in self._stubs: - self._stubs['update_trigger'] = self.grpc_channel.unary_unary( + self._stubs['update_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/UpdateTrigger', request_serializer=eventarc.UpdateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -391,7 +467,7 @@ def delete_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_trigger' not in self._stubs: - self._stubs['delete_trigger'] = self.grpc_channel.unary_unary( + self._stubs['delete_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/DeleteTrigger', request_serializer=eventarc.DeleteTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -417,7 +493,7 @@ def get_channel(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'get_channel' not in self._stubs: - self._stubs['get_channel'] = self.grpc_channel.unary_unary( + self._stubs['get_channel'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetChannel', request_serializer=eventarc.GetChannelRequest.serialize, response_deserializer=channel.Channel.deserialize, @@ -443,7 +519,7 @@ def list_channels(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_channels' not in self._stubs: - self._stubs['list_channels'] = self.grpc_channel.unary_unary( + self._stubs['list_channels'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListChannels', request_serializer=eventarc.ListChannelsRequest.serialize, response_deserializer=eventarc.ListChannelsResponse.deserialize, @@ -470,7 +546,7 @@ def create_channel_(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_channel_' not in self._stubs: - self._stubs['create_channel_'] = self.grpc_channel.unary_unary( + self._stubs['create_channel_'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/CreateChannel', request_serializer=eventarc.CreateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -496,7 +572,7 @@ def update_channel(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_channel' not in self._stubs: - self._stubs['update_channel'] = self.grpc_channel.unary_unary( + self._stubs['update_channel'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/UpdateChannel', request_serializer=eventarc.UpdateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -522,7 +598,7 @@ def delete_channel(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_channel' not in self._stubs: - self._stubs['delete_channel'] = self.grpc_channel.unary_unary( + self._stubs['delete_channel'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/DeleteChannel', request_serializer=eventarc.DeleteChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -548,7 +624,7 @@ def get_provider(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_provider' not in self._stubs: - self._stubs['get_provider'] = self.grpc_channel.unary_unary( + self._stubs['get_provider'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetProvider', request_serializer=eventarc.GetProviderRequest.serialize, response_deserializer=discovery.Provider.deserialize, @@ -574,7 +650,7 @@ def list_providers(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_providers' not in self._stubs: - self._stubs['list_providers'] = self.grpc_channel.unary_unary( + self._stubs['list_providers'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListProviders', request_serializer=eventarc.ListProvidersRequest.serialize, response_deserializer=eventarc.ListProvidersResponse.deserialize, @@ -600,7 +676,7 @@ def get_channel_connection(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'get_channel_connection' not in self._stubs: - self._stubs['get_channel_connection'] = self.grpc_channel.unary_unary( + self._stubs['get_channel_connection'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetChannelConnection', request_serializer=eventarc.GetChannelConnectionRequest.serialize, response_deserializer=channel_connection.ChannelConnection.deserialize, @@ -626,7 +702,7 @@ def list_channel_connections(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_channel_connections' not in self._stubs: - self._stubs['list_channel_connections'] = self.grpc_channel.unary_unary( + self._stubs['list_channel_connections'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListChannelConnections', request_serializer=eventarc.ListChannelConnectionsRequest.serialize, response_deserializer=eventarc.ListChannelConnectionsResponse.deserialize, @@ -653,7 +729,7 @@ def create_channel_connection(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_channel_connection' not in self._stubs: - self._stubs['create_channel_connection'] = self.grpc_channel.unary_unary( + self._stubs['create_channel_connection'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection', request_serializer=eventarc.CreateChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -679,7 +755,7 @@ def delete_channel_connection(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_channel_connection' not in self._stubs: - self._stubs['delete_channel_connection'] = self.grpc_channel.unary_unary( + self._stubs['delete_channel_connection'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection', request_serializer=eventarc.DeleteChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -705,7 +781,7 @@ def get_google_channel_config(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_google_channel_config' not in self._stubs: - self._stubs['get_google_channel_config'] = self.grpc_channel.unary_unary( + self._stubs['get_google_channel_config'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig', request_serializer=eventarc.GetGoogleChannelConfigRequest.serialize, response_deserializer=google_channel_config.GoogleChannelConfig.deserialize, @@ -731,7 +807,7 @@ def update_google_channel_config(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_google_channel_config' not in self._stubs: - self._stubs['update_google_channel_config'] = self.grpc_channel.unary_unary( + self._stubs['update_google_channel_config'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig', request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, @@ -831,6 +907,51 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): @@ -839,7 +960,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -856,7 +977,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -874,7 +995,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -892,7 +1013,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -910,7 +1031,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -928,7 +1049,7 @@ def list_locations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -946,7 +1067,7 @@ def get_location( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, @@ -971,7 +1092,7 @@ def set_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -997,7 +1118,7 @@ def get_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -1025,7 +1146,7 @@ def test_iam_permissions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index be3f58cf1d..32211ddf9e 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -53,11 +54,18 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -225,7 +233,7 @@ def post_update_trigger(self, response): """ - def pre_create_channel(self, request: eventarc.CreateChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateChannelRequest, Sequence[Tuple[str, str]]]: + def pre_create_channel(self, request: eventarc.CreateChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_channel Override in a subclass to manipulate the request or metadata @@ -236,12 +244,32 @@ def pre_create_channel(self, request: eventarc.CreateChannelRequest, metadata: S def post_create_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for create_channel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_channel` interceptor runs + before the `post_create_channel_with_metadata` interceptor. """ return response - def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateChannelConnectionRequest, Sequence[Tuple[str, str]]]: + + def post_create_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_channel_with_metadata` + interceptor in new development instead of the `post_create_channel` interceptor. + When both interceptors are used, this `post_create_channel_with_metadata` interceptor runs after the + `post_create_channel` interceptor. The (possibly modified) response returned by + `post_create_channel` will be passed to + `post_create_channel_with_metadata`. 
+ """ + return response, metadata + + def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_channel_connection Override in a subclass to manipulate the request or metadata @@ -252,12 +280,32 @@ def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectio def post_create_channel_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for create_channel_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_channel_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_channel_connection` interceptor runs + before the `post_create_channel_connection_with_metadata` interceptor. """ return response - def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, str]]]: + + def post_create_channel_connection_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_channel_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_channel_connection_with_metadata` + interceptor in new development instead of the `post_create_channel_connection` interceptor. + When both interceptors are used, this `post_create_channel_connection_with_metadata` interceptor runs after the + `post_create_channel_connection` interceptor. The (possibly modified) response returned by + `post_create_channel_connection` will be passed to + `post_create_channel_connection_with_metadata`. + """ + return response, metadata + + def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_trigger Override in a subclass to manipulate the request or metadata @@ -268,12 +316,32 @@ def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: S def post_create_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for create_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_trigger` interceptor runs + before the `post_create_trigger_with_metadata` interceptor. 
""" return response - def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteChannelRequest, Sequence[Tuple[str, str]]]: + + def post_create_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_trigger_with_metadata` + interceptor in new development instead of the `post_create_trigger` interceptor. + When both interceptors are used, this `post_create_trigger_with_metadata` interceptor runs after the + `post_create_trigger` interceptor. The (possibly modified) response returned by + `post_create_trigger` will be passed to + `post_create_trigger_with_metadata`. + """ + return response, metadata + + def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_channel Override in a subclass to manipulate the request or metadata @@ -284,12 +352,32 @@ def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: S def post_delete_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for delete_channel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_channel` interceptor runs + before the `post_delete_channel_with_metadata` interceptor. """ return response - def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteChannelConnectionRequest, Sequence[Tuple[str, str]]]: + + def post_delete_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_channel_with_metadata` + interceptor in new development instead of the `post_delete_channel` interceptor. + When both interceptors are used, this `post_delete_channel_with_metadata` interceptor runs after the + `post_delete_channel` interceptor. The (possibly modified) response returned by + `post_delete_channel` will be passed to + `post_delete_channel_with_metadata`. 
+ """ + return response, metadata + + def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_channel_connection Override in a subclass to manipulate the request or metadata @@ -300,12 +388,32 @@ def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectio def post_delete_channel_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for delete_channel_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_channel_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_channel_connection` interceptor runs + before the `post_delete_channel_connection_with_metadata` interceptor. """ return response - def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, str]]]: + + def post_delete_channel_connection_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_channel_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_channel_connection_with_metadata` + interceptor in new development instead of the `post_delete_channel_connection` interceptor. + When both interceptors are used, this `post_delete_channel_connection_with_metadata` interceptor runs after the + `post_delete_channel_connection` interceptor. The (possibly modified) response returned by + `post_delete_channel_connection` will be passed to + `post_delete_channel_connection_with_metadata`. + """ + return response, metadata + + def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_trigger Override in a subclass to manipulate the request or metadata @@ -316,12 +424,32 @@ def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: S def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for delete_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_trigger` interceptor runs + before the `post_delete_trigger_with_metadata` interceptor. 
""" return response - def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, str]]]: + + def post_delete_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_trigger_with_metadata` + interceptor in new development instead of the `post_delete_trigger` interceptor. + When both interceptors are used, this `post_delete_trigger_with_metadata` interceptor runs after the + `post_delete_trigger` interceptor. The (possibly modified) response returned by + `post_delete_trigger` will be passed to + `post_delete_trigger_with_metadata`. + """ + return response, metadata + + def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_channel Override in a subclass to manipulate the request or metadata @@ -332,12 +460,32 @@ def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequenc def post_get_channel(self, response: channel.Channel) -> channel.Channel: """Post-rpc interceptor for get_channel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_channel` interceptor runs + before the `post_get_channel_with_metadata` interceptor. """ return response - def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, str]]]: + + def post_get_channel_with_metadata(self, response: channel.Channel, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[channel.Channel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_channel_with_metadata` + interceptor in new development instead of the `post_get_channel` interceptor. + When both interceptors are used, this `post_get_channel_with_metadata` interceptor runs after the + `post_get_channel` interceptor. The (possibly modified) response returned by + `post_get_channel` will be passed to + `post_get_channel_with_metadata`. 
+ """ + return response, metadata + + def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_channel_connection Override in a subclass to manipulate the request or metadata @@ -348,12 +496,32 @@ def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionReque def post_get_channel_connection(self, response: channel_connection.ChannelConnection) -> channel_connection.ChannelConnection: """Post-rpc interceptor for get_channel_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_channel_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_channel_connection` interceptor runs + before the `post_get_channel_connection_with_metadata` interceptor. """ return response - def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, str]]]: + + def post_get_channel_connection_with_metadata(self, response: channel_connection.ChannelConnection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[channel_connection.ChannelConnection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_channel_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_channel_connection_with_metadata` + interceptor in new development instead of the `post_get_channel_connection` interceptor. + When both interceptors are used, this `post_get_channel_connection_with_metadata` interceptor runs after the + `post_get_channel_connection` interceptor. The (possibly modified) response returned by + `post_get_channel_connection` will be passed to + `post_get_channel_connection_with_metadata`. + """ + return response, metadata + + def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_google_channel_config Override in a subclass to manipulate the request or metadata @@ -364,12 +532,32 @@ def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfig def post_get_google_channel_config(self, response: google_channel_config.GoogleChannelConfig) -> google_channel_config.GoogleChannelConfig: """Post-rpc interceptor for get_google_channel_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_google_channel_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_google_channel_config` interceptor runs + before the `post_get_google_channel_config_with_metadata` interceptor. 
""" return response - def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, str]]]: + + def post_get_google_channel_config_with_metadata(self, response: google_channel_config.GoogleChannelConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[google_channel_config.GoogleChannelConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_google_channel_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_google_channel_config_with_metadata` + interceptor in new development instead of the `post_get_google_channel_config` interceptor. + When both interceptors are used, this `post_get_google_channel_config_with_metadata` interceptor runs after the + `post_get_google_channel_config` interceptor. The (possibly modified) response returned by + `post_get_google_channel_config` will be passed to + `post_get_google_channel_config_with_metadata`. + """ + return response, metadata + + def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_provider Override in a subclass to manipulate the request or metadata @@ -380,12 +568,32 @@ def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Seque def post_get_provider(self, response: discovery.Provider) -> discovery.Provider: """Post-rpc interceptor for get_provider - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_provider_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_provider` interceptor runs + before the `post_get_provider_with_metadata` interceptor. """ return response - def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetTriggerRequest, Sequence[Tuple[str, str]]]: + + def post_get_provider_with_metadata(self, response: discovery.Provider, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[discovery.Provider, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_provider + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_provider_with_metadata` + interceptor in new development instead of the `post_get_provider` interceptor. + When both interceptors are used, this `post_get_provider_with_metadata` interceptor runs after the + `post_get_provider` interceptor. The (possibly modified) response returned by + `post_get_provider` will be passed to + `post_get_provider_with_metadata`. 
+ """ + return response, metadata + + def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_trigger Override in a subclass to manipulate the request or metadata @@ -396,12 +604,32 @@ def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequenc def post_get_trigger(self, response: trigger.Trigger) -> trigger.Trigger: """Post-rpc interceptor for get_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_trigger` interceptor runs + before the `post_get_trigger_with_metadata` interceptor. """ return response - def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListChannelConnectionsRequest, Sequence[Tuple[str, str]]]: + + def post_get_trigger_with_metadata(self, response: trigger.Trigger, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[trigger.Trigger, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_trigger_with_metadata` + interceptor in new development instead of the `post_get_trigger` interceptor. + When both interceptors are used, this `post_get_trigger_with_metadata` interceptor runs after the + `post_get_trigger` interceptor. The (possibly modified) response returned by + `post_get_trigger` will be passed to + `post_get_trigger_with_metadata`. + """ + return response, metadata + + def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_channel_connections Override in a subclass to manipulate the request or metadata @@ -412,12 +640,32 @@ def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsR def post_list_channel_connections(self, response: eventarc.ListChannelConnectionsResponse) -> eventarc.ListChannelConnectionsResponse: """Post-rpc interceptor for list_channel_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_channel_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_channel_connections` interceptor runs + before the `post_list_channel_connections_with_metadata` interceptor. 
""" return response - def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListChannelsRequest, Sequence[Tuple[str, str]]]: + + def post_list_channel_connections_with_metadata(self, response: eventarc.ListChannelConnectionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_channel_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_channel_connections_with_metadata` + interceptor in new development instead of the `post_list_channel_connections` interceptor. + When both interceptors are used, this `post_list_channel_connections_with_metadata` interceptor runs after the + `post_list_channel_connections` interceptor. The (possibly modified) response returned by + `post_list_channel_connections` will be passed to + `post_list_channel_connections_with_metadata`. + """ + return response, metadata + + def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_channels Override in a subclass to manipulate the request or metadata @@ -428,12 +676,32 @@ def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Seq def post_list_channels(self, response: eventarc.ListChannelsResponse) -> eventarc.ListChannelsResponse: """Post-rpc interceptor for list_channels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_channels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_channels` interceptor runs + before the `post_list_channels_with_metadata` interceptor. """ return response - def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, str]]]: + + def post_list_channels_with_metadata(self, response: eventarc.ListChannelsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_channels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_channels_with_metadata` + interceptor in new development instead of the `post_list_channels` interceptor. + When both interceptors are used, this `post_list_channels_with_metadata` interceptor runs after the + `post_list_channels` interceptor. The (possibly modified) response returned by + `post_list_channels` will be passed to + `post_list_channels_with_metadata`. 
+ """ + return response, metadata + + def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_providers Override in a subclass to manipulate the request or metadata @@ -444,12 +712,32 @@ def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: S def post_list_providers(self, response: eventarc.ListProvidersResponse) -> eventarc.ListProvidersResponse: """Post-rpc interceptor for list_providers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_providers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_providers` interceptor runs + before the `post_list_providers_with_metadata` interceptor. """ return response - def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, str]]]: + + def post_list_providers_with_metadata(self, response: eventarc.ListProvidersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_providers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_providers_with_metadata` + interceptor in new development instead of the `post_list_providers` interceptor. + When both interceptors are used, this `post_list_providers_with_metadata` interceptor runs after the + `post_list_providers` interceptor. The (possibly modified) response returned by + `post_list_providers` will be passed to + `post_list_providers_with_metadata`. + """ + return response, metadata + + def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_triggers Override in a subclass to manipulate the request or metadata @@ -460,12 +748,32 @@ def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Seq def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> eventarc.ListTriggersResponse: """Post-rpc interceptor for list_triggers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_triggers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_triggers` interceptor runs + before the `post_list_triggers_with_metadata` interceptor. 
""" return response - def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, str]]]: + + def post_list_triggers_with_metadata(self, response: eventarc.ListTriggersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_triggers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_triggers_with_metadata` + interceptor in new development instead of the `post_list_triggers` interceptor. + When both interceptors are used, this `post_list_triggers_with_metadata` interceptor runs after the + `post_list_triggers` interceptor. The (possibly modified) response returned by + `post_list_triggers` will be passed to + `post_list_triggers_with_metadata`. + """ + return response, metadata + + def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_channel Override in a subclass to manipulate the request or metadata @@ -476,12 +784,32 @@ def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: S def post_update_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for update_channel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_channel` interceptor runs + before the `post_update_channel_with_metadata` interceptor. """ return response - def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateGoogleChannelConfigRequest, Sequence[Tuple[str, str]]]: + + def post_update_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_channel_with_metadata` + interceptor in new development instead of the `post_update_channel` interceptor. + When both interceptors are used, this `post_update_channel_with_metadata` interceptor runs after the + `post_update_channel` interceptor. The (possibly modified) response returned by + `post_update_channel` will be passed to + `post_update_channel_with_metadata`. 
+ """ + return response, metadata + + def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_google_channel_config Override in a subclass to manipulate the request or metadata @@ -492,12 +820,32 @@ def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannel def post_update_google_channel_config(self, response: gce_google_channel_config.GoogleChannelConfig) -> gce_google_channel_config.GoogleChannelConfig: """Post-rpc interceptor for update_google_channel_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_google_channel_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_google_channel_config` interceptor runs + before the `post_update_google_channel_config_with_metadata` interceptor. """ return response - def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateTriggerRequest, Sequence[Tuple[str, str]]]: + + def post_update_google_channel_config_with_metadata(self, response: gce_google_channel_config.GoogleChannelConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gce_google_channel_config.GoogleChannelConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_google_channel_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_google_channel_config_with_metadata` + interceptor in new development instead of the `post_update_google_channel_config` interceptor. + When both interceptors are used, this `post_update_google_channel_config_with_metadata` interceptor runs after the + `post_update_google_channel_config` interceptor. The (possibly modified) response returned by + `post_update_google_channel_config` will be passed to + `post_update_google_channel_config_with_metadata`. + """ + return response, metadata + + def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_trigger Override in a subclass to manipulate the request or metadata @@ -508,15 +856,34 @@ def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: S def post_update_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for update_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_trigger` interceptor runs + before the `post_update_trigger_with_metadata` interceptor. 
""" return response + def post_update_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_trigger_with_metadata` + interceptor in new development instead of the `post_update_trigger` interceptor. + When both interceptors are used, this `post_update_trigger_with_metadata` interceptor runs after the + `post_update_trigger` interceptor. The (possibly modified) response returned by + `post_update_trigger` will be passed to + `post_update_trigger_with_metadata`. + """ + return response, metadata + def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -534,9 +901,10 @@ def post_get_location( it is returned to user code. """ return response + def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -554,9 +922,10 @@ def post_list_locations( it is returned to user code. """ return response + def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_iam_policy Override in a subclass to manipulate the request or metadata @@ -574,9 +943,10 @@ def post_get_iam_policy( it is returned to user code. """ return response + def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for set_iam_policy Override in a subclass to manipulate the request or metadata @@ -594,9 +964,10 @@ def post_set_iam_policy( it is returned to user code. 
""" return response + def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for test_iam_permissions Override in a subclass to manipulate the request or metadata @@ -614,9 +985,10 @@ def post_test_iam_permissions( it is returned to user code. """ return response + def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -634,9 +1006,10 @@ def post_cancel_operation( it is returned to user code. """ return response + def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -654,9 +1027,10 @@ def post_delete_operation( it is returned to user code. """ return response + def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -674,9 +1048,10 @@ def post_get_operation( it is returned to user code. """ return response + def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -868,7 +1243,7 @@ def __call__(self, request: eventarc.CreateChannelRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the create channel method over HTTP. @@ -879,8 +1254,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -891,6 +1268,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseCreateChannel._get_http_options() + request, metadata = self._interceptor.pre_create_channel(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseCreateChannel._get_transcoded_request(http_options, request) @@ -899,6 +1277,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseCreateChannel._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateChannel", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._CreateChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -910,7 +1311,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_channel_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_channel_", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateChannel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _CreateChannelConnection(_BaseEventarcRestTransport._BaseCreateChannelConnection, EventarcRestStub): @@ -944,7 +1367,7 @@ def __call__(self, request: eventarc.CreateChannelConnectionRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the create channel connection method over HTTP. @@ -955,8 +1378,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -967,6 +1392,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_http_options() + request, metadata = self._interceptor.pre_create_channel_connection(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_transcoded_request(http_options, request) @@ -975,6 +1401,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannelConnection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateChannelConnection", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._CreateChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -986,7 +1435,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_channel_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_channel_connection_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_channel_connection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateChannelConnection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _CreateTrigger(_BaseEventarcRestTransport._BaseCreateTrigger, EventarcRestStub): @@ -1020,7 +1491,7 @@ def __call__(self, request: eventarc.CreateTriggerRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the create trigger method over HTTP. @@ -1031,8 +1502,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
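The widened `metadata` annotation above follows gRPC's binary-header convention: plain keys carry `str` values, while keys ending in `-bin` carry `bytes`. A minimal usage sketch against the generated client (the project, trigger, and header keys are hypothetical placeholders):

```python
from google.cloud import eventarc_v1

# Uses Application Default Credentials; transport="rest" selects the
# REST transport that this diff modifies.
client = eventarc_v1.EventarcClient(transport="rest")

trigger = client.get_trigger(
    name="projects/my-project/locations/us-central1/triggers/my-trigger",
    metadata=[
        ("x-example-audit-job", "nightly-sync"),    # ordinary key: str value
        ("x-example-context-bin", b"\x0a\x03abc"),  # "-bin" suffix: bytes value
    ],
)
```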
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1043,6 +1516,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseCreateTrigger._get_http_options() + request, metadata = self._interceptor.pre_create_trigger(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseCreateTrigger._get_transcoded_request(http_options, request) @@ -1051,6 +1525,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseCreateTrigger._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateTrigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateTrigger", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._CreateTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1062,7 +1559,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_trigger_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_trigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteChannel(_BaseEventarcRestTransport._BaseDeleteChannel, EventarcRestStub): @@ -1095,7 +1614,7 @@ def __call__(self, request: eventarc.DeleteChannelRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the delete channel method over HTTP. @@ -1106,8 +1625,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1118,12 +1639,36 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseDeleteChannel._get_http_options() + request, metadata = self._interceptor.pre_delete_channel(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannel._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseDeleteChannel._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteChannel", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._DeleteChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1135,7 +1680,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_channel_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_channel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteChannel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteChannelConnection(_BaseEventarcRestTransport._BaseDeleteChannelConnection, EventarcRestStub): @@ -1168,7 +1735,7 @@ def __call__(self, request: eventarc.DeleteChannelConnectionRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the delete channel connection method over HTTP. @@ -1179,8 +1746,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
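Each `__call__` above now derives response metadata from the HTTP headers (`[(k, str(v)) for k, v in response.headers.items()]`) and feeds it through the new `post_*_with_metadata` hook. A sketch of a subclass that inspects those headers, assuming the generated `EventarcRestTransport(interceptor=...)` constructor and Application Default Credentials:

```python
import logging
from typing import Sequence, Tuple, Union

from google.cloud import eventarc_v1
from google.cloud.eventarc_v1.services.eventarc.transports import rest
from google.cloud.eventarc_v1.types import discovery


class HeaderReadingInterceptor(rest.EventarcRestInterceptor):
    def post_get_provider_with_metadata(
        self,
        response: discovery.Provider,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[discovery.Provider, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` is built from the HTTP response headers by the transport.
        # The hook may modify both values, but note that the transport keeps
        # only the response (`resp, _ = ...` in the diff above).
        logging.getLogger(__name__).debug("GetProvider headers: %s", dict(metadata))
        return response, metadata


transport = rest.EventarcRestTransport(interceptor=HeaderReadingInterceptor())
client = eventarc_v1.EventarcClient(transport=transport)
```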
Returns: ~.operations_pb2.Operation: @@ -1191,12 +1760,36 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_http_options() + request, metadata = self._interceptor.pre_delete_channel_connection(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannelConnection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteChannelConnection", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._DeleteChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1208,7 +1801,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_channel_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_channel_connection_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_channel_connection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteChannelConnection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteTrigger(_BaseEventarcRestTransport._BaseDeleteTrigger, EventarcRestStub): @@ -1241,7 +1856,7 @@ def __call__(self, request: eventarc.DeleteTriggerRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the delete trigger method over HTTP. @@ -1252,8 +1867,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -1264,12 +1881,36 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseDeleteTrigger._get_http_options() + request, metadata = self._interceptor.pre_delete_trigger(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseDeleteTrigger._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseDeleteTrigger._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteTrigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteTrigger", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._DeleteTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1281,7 +1922,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_trigger_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_trigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetChannel(_BaseEventarcRestTransport._BaseGetChannel, EventarcRestStub): @@ -1314,7 +1977,7 @@ def __call__(self, request: eventarc.GetChannelRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> channel.Channel: r"""Call the get channel method over HTTP. @@ -1325,8 +1988,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
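Per the deprecation notes earlier in this diff, an overridden `post_get_trigger` still runs first, and its (possibly modified) return value is what `post_get_trigger_with_metadata` receives. A sketch that records that ordering with plain instance state (the `calls` list is purely illustrative):

```python
from google.cloud.eventarc_v1.services.eventarc.transports import rest


class OrderingInterceptor(rest.EventarcRestInterceptor):
    """Records hook order: deprecated hook first, *_with_metadata second."""

    def __init__(self):
        self.calls = []

    def post_get_trigger(self, response):
        self.calls.append("post_get_trigger")  # runs first (deprecated hook)
        return response

    def post_get_trigger_with_metadata(self, response, metadata):
        self.calls.append("post_get_trigger_with_metadata")  # runs second,
        return response, metadata                # receiving the value above
```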
Returns: ~.channel.Channel: @@ -1342,12 +2007,36 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseGetChannel._get_http_options() + request, metadata = self._interceptor.pre_get_channel(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetChannel._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetChannel._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetChannel", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._GetChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1361,7 +2050,29 @@ def __call__(self, pb_resp = channel.Channel.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_channel_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = channel.Channel.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_channel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetChannel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetChannelConnection(_BaseEventarcRestTransport._BaseGetChannelConnection, EventarcRestStub): @@ -1394,7 +2105,7 @@ def __call__(self, request: eventarc.GetChannelConnectionRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> channel_connection.ChannelConnection: r"""Call the get channel connection method over HTTP. @@ -1405,8 +2116,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.channel_connection.ChannelConnection: @@ -1421,12 +2134,36 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseGetChannelConnection._get_http_options() + request, metadata = self._interceptor.pre_get_channel_connection(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetChannelConnection._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetChannelConnection._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannelConnection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetChannelConnection", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._GetChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1440,7 +2177,29 @@ def __call__(self, pb_resp = channel_connection.ChannelConnection.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_channel_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_channel_connection_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = channel_connection.ChannelConnection.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_channel_connection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetChannelConnection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetGoogleChannelConfig(_BaseEventarcRestTransport._BaseGetGoogleChannelConfig, EventarcRestStub): @@ -1473,7 +2232,7 @@ def __call__(self, request: eventarc.GetGoogleChannelConfigRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> google_channel_config.GoogleChannelConfig: r"""Call the get google channel config method over HTTP. @@ -1484,8 +2243,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.google_channel_config.GoogleChannelConfig: @@ -1501,12 +2262,36 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_http_options() + request, metadata = self._interceptor.pre_get_google_channel_config(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetGoogleChannelConfig", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetGoogleChannelConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._GetGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1520,7 +2305,29 @@ def __call__(self, pb_resp = google_channel_config.GoogleChannelConfig.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_google_channel_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_google_channel_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = google_channel_config.GoogleChannelConfig.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_google_channel_config", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetGoogleChannelConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetProvider(_BaseEventarcRestTransport._BaseGetProvider, EventarcRestStub): @@ -1553,7 +2360,7 @@ def __call__(self, request: eventarc.GetProviderRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> discovery.Provider: r"""Call the get provider method over HTTP. @@ -1564,8 +2371,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
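The request/response DEBUG logs added throughout are gated on `CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG)`, and `_LOGGER` sits under the `google.cloud.eventarc_v1` package hierarchy. One way to surface them is plain standard-library configuration (a sketch; google-api-core's scoped-logging environment variable `GOOGLE_SDK_PYTHON_LOGGING_SCOPE` should achieve the same where that feature is available):

```python
import logging

# Install a root handler, then lower the threshold for the Eventarc client
# hierarchy only, so the transport's isEnabledFor(DEBUG) gate passes without
# turning on DEBUG for every library in the process.
logging.basicConfig(level=logging.INFO)
logging.getLogger("google.cloud.eventarc_v1").setLevel(logging.DEBUG)
```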
Returns: ~.discovery.Provider: @@ -1575,12 +2384,36 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseGetProvider._get_http_options() + request, metadata = self._interceptor.pre_get_provider(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetProvider._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetProvider._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetProvider", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetProvider", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._GetProvider._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1594,7 +2427,29 @@ def __call__(self, pb_resp = discovery.Provider.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_provider(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_provider_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = discovery.Provider.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_provider", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetProvider", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetTrigger(_BaseEventarcRestTransport._BaseGetTrigger, EventarcRestStub): @@ -1627,7 +2482,7 @@ def __call__(self, request: eventarc.GetTriggerRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> trigger.Trigger: r"""Call the get trigger method over HTTP. @@ -1638,8 +2493,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.trigger.Trigger: @@ -1649,12 +2506,36 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseGetTrigger._get_http_options() + request, metadata = self._interceptor.pre_get_trigger(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetTrigger._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetTrigger._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetTrigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetTrigger", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._GetTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1668,7 +2549,29 @@ def __call__(self, pb_resp = trigger.Trigger.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_trigger_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = trigger.Trigger.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_trigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListChannelConnections(_BaseEventarcRestTransport._BaseListChannelConnections, EventarcRestStub): @@ -1701,7 +2604,7 @@ def __call__(self, request: eventarc.ListChannelConnectionsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> eventarc.ListChannelConnectionsResponse: r"""Call the list channel connections method over HTTP. @@ -1712,8 +2615,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
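The `pre_*` hooks keep their shape but now accept the widened metadata type; whatever `(request, metadata)` pair they return is what gets transcoded and sent. A sketch that appends a hypothetical tracking header before `list_triggers`:

```python
from typing import Sequence, Tuple, Union

from google.cloud.eventarc_v1.services.eventarc.transports import rest
from google.cloud.eventarc_v1.types import eventarc


class StampingInterceptor(rest.EventarcRestInterceptor):
    def pre_list_triggers(
        self,
        request: eventarc.ListTriggersRequest,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Runs before URL transcoding; the header key is hypothetical.
        return request, list(metadata) + [("x-example-caller", "report-job")]
```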
                Returns:
                    ~.eventarc.ListChannelConnectionsResponse:

@@ -1723,12 +2628,36 @@ def __call__(self,
             """
 
             http_options = _BaseEventarcRestTransport._BaseListChannelConnections._get_http_options()
+            request, metadata = self._interceptor.pre_list_channel_connections(request, metadata)
             transcoded_request = _BaseEventarcRestTransport._BaseListChannelConnections._get_transcoded_request(http_options, request)
 
             # Jsonify the query params
             query_params = _BaseEventarcRestTransport._BaseListChannelConnections._get_query_params_json(transcoded_request)
 
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except Exception:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    "Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannelConnections",
+                    extra = {
+                        "serviceName": "google.cloud.eventarc.v1.Eventarc",
+                        "rpcName": "ListChannelConnections",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
             # Send the request
             response = EventarcRestTransport._ListChannelConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
 
@@ -1742,7 +2671,29 @@ def __call__(self,
             pb_resp = eventarc.ListChannelConnectionsResponse.pb(resp)
 
             json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
             resp = self._interceptor.post_list_channel_connections(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_list_channel_connections_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = eventarc.ListChannelConnectionsResponse.to_json(resp)
+                except Exception:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.eventarc_v1.EventarcClient.list_channel_connections",
+                    extra = {
+                        "serviceName": "google.cloud.eventarc.v1.Eventarc",
+                        "rpcName": "ListChannelConnections",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
             return resp
 
     class _ListChannels(_BaseEventarcRestTransport._BaseListChannels, EventarcRestStub):
@@ -1775,7 +2726,7 @@ def __call__(self,
             request: eventarc.ListChannelsRequest, *,
             retry: OptionalRetry=gapic_v1.method.DEFAULT,
             timeout: Optional[float]=None,
-            metadata: Sequence[Tuple[str, str]]=(),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
             ) -> eventarc.ListChannelsResponse:
         r"""Call the list channels method over HTTP.
 
@@ -1786,8 +2737,10 @@ def __call__(self,
                 retry (google.api_core.retry.Retry): Designation of what errors, if any,
                     should be retried.
                 timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
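# --- Illustrative usage (hypothetical header names, not part of the diff) -----
# The widened annotation above admits bytes values, but only for metadata keys
# ending in "-bin"; all other values stay str. A sketch of a well-formed
# metadata sequence ("x-custom-trace-bin" is an invented example key, not an
# Eventarc header):
metadata = [
    ("x-goog-request-params", "parent=projects/my-project/locations/us-central1"),
    ("x-custom-trace-bin", b"\x00\x01\x02"),  # "-bin" suffix => bytes value
]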
Returns: ~.eventarc.ListChannelsResponse: @@ -1795,12 +2748,36 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseListChannels._get_http_options() + request, metadata = self._interceptor.pre_list_channels(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseListChannels._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListChannels._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannels", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListChannels", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._ListChannels._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1814,7 +2791,29 @@ def __call__(self, pb_resp = eventarc.ListChannelsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_channels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_channels_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = eventarc.ListChannelsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.list_channels", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListChannels", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListProviders(_BaseEventarcRestTransport._BaseListProviders, EventarcRestStub): @@ -1847,7 +2846,7 @@ def __call__(self, request: eventarc.ListProvidersRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> eventarc.ListProvidersResponse: r"""Call the list providers method over HTTP. @@ -1858,8 +2857,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
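# --- Illustrative usage (not part of the diff) --------------------------------
# The request-logging blocks above serialize proto-plus request messages with
# the class-level to_json helper; the type(request).to_json(request) call can
# be tried directly (request type taken from this library):
from google.cloud import eventarc_v1

request = eventarc_v1.ListProvidersRequest(parent="projects/p/locations/us-central1")
print(type(request).to_json(request))  # JSON string of the request message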
Returns: ~.eventarc.ListProvidersResponse: @@ -1867,12 +2868,36 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseListProviders._get_http_options() + request, metadata = self._interceptor.pre_list_providers(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseListProviders._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListProviders._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListProviders", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListProviders", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._ListProviders._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1886,7 +2911,29 @@ def __call__(self, pb_resp = eventarc.ListProvidersResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_providers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_providers_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = eventarc.ListProvidersResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.list_providers", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListProviders", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListTriggers(_BaseEventarcRestTransport._BaseListTriggers, EventarcRestStub): @@ -1919,7 +2966,7 @@ def __call__(self, request: eventarc.ListTriggersRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> eventarc.ListTriggersResponse: r"""Call the list triggers method over HTTP. @@ -1930,8 +2977,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
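# --- Illustrative usage (sketch; base-class name assumed) ----------------------
# The new post_*_with_metadata hooks hand the parsed response plus the HTTP
# response headers to the interceptor before the result reaches the caller.
# A minimal override, assuming the generated interceptor base class is exposed
# as EventarcRestInterceptor in the transports.rest module (name not shown in
# this hunk):
from typing import Sequence, Tuple
from google.cloud.eventarc_v1.services.eventarc.transports.rest import EventarcRestInterceptor

class HeaderEchoInterceptor(EventarcRestInterceptor):
    def post_list_providers_with_metadata(self, response, metadata: Sequence[Tuple[str, str]]):
        # Inspect (or rewrite) the server headers, then return them unchanged.
        print(dict(metadata))
        return response, metadata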
Returns: ~.eventarc.ListTriggersResponse: @@ -1939,12 +2988,36 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseListTriggers._get_http_options() + request, metadata = self._interceptor.pre_list_triggers(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseListTriggers._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListTriggers._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListTriggers", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListTriggers", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._ListTriggers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1958,7 +3031,29 @@ def __call__(self, pb_resp = eventarc.ListTriggersResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_triggers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_triggers_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = eventarc.ListTriggersResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.list_triggers", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListTriggers", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateChannel(_BaseEventarcRestTransport._BaseUpdateChannel, EventarcRestStub): @@ -1992,7 +3087,7 @@ def __call__(self, request: eventarc.UpdateChannelRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the update channel method over HTTP. @@ -2003,8 +3098,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -2015,6 +3112,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseUpdateChannel._get_http_options() + request, metadata = self._interceptor.pre_update_channel(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseUpdateChannel._get_transcoded_request(http_options, request) @@ -2023,6 +3121,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseUpdateChannel._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateChannel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "UpdateChannel", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._UpdateChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2034,7 +3155,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_channel_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.update_channel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "UpdateChannel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateGoogleChannelConfig(_BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig, EventarcRestStub): @@ -2068,7 +3211,7 @@ def __call__(self, request: eventarc.UpdateGoogleChannelConfigRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> gce_google_channel_config.GoogleChannelConfig: r"""Call the update google channel config method over HTTP. @@ -2080,8 +3223,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
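# --- Illustrative usage (not part of the diff) --------------------------------
# Operation-returning RPCs such as UpdateChannel log their payloads with
# json_format.MessageToJson, which operates on plain protobuf messages:
from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation(name="operations/123", done=True)
print(json_format.MessageToJson(op))  # {"name": "operations/123", "done": true}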
Returns: ~.gce_google_channel_config.GoogleChannelConfig: @@ -2097,6 +3242,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_http_options() + request, metadata = self._interceptor.pre_update_google_channel_config(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_transcoded_request(http_options, request) @@ -2105,6 +3251,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateGoogleChannelConfig", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "UpdateGoogleChannelConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._UpdateGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2118,7 +3287,29 @@ def __call__(self, pb_resp = gce_google_channel_config.GoogleChannelConfig.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_google_channel_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_google_channel_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = gce_google_channel_config.GoogleChannelConfig.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.update_google_channel_config", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "UpdateGoogleChannelConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateTrigger(_BaseEventarcRestTransport._BaseUpdateTrigger, EventarcRestStub): @@ -2152,7 +3343,7 @@ def __call__(self, request: eventarc.UpdateTriggerRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the update trigger method over HTTP. @@ -2163,8 +3354,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -2175,6 +3368,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseUpdateTrigger._get_http_options() + request, metadata = self._interceptor.pre_update_trigger(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseUpdateTrigger._get_transcoded_request(http_options, request) @@ -2183,6 +3377,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseUpdateTrigger._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateTrigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "UpdateTrigger", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._UpdateTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2194,7 +3411,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_trigger_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.update_trigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "UpdateTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2346,6 +3585,9 @@ def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore class _GetLocation(_BaseEventarcRestTransport._BaseGetLocation, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetLocation") + @staticmethod def _get_response( host, @@ -2372,7 +3614,7 @@ def __call__(self, request: locations_pb2.GetLocationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> locations_pb2.Location: r"""Call the get location method over HTTP. @@ -2383,20 +3625,46 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: locations_pb2.Location: Response from GetLocation method. """ http_options = _BaseEventarcRestTransport._BaseGetLocation._get_http_options() + request, metadata = self._interceptor.pre_get_location(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetLocation", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2405,9 +3673,29 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = locations_pb2.Location() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -2415,6 +3703,9 @@ def list_locations(self): return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore class _ListLocations(_BaseEventarcRestTransport._BaseListLocations, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.ListLocations") + @staticmethod def _get_response( host, @@ -2441,7 +3732,7 @@ def __call__(self, request: locations_pb2.ListLocationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. @@ -2452,20 +3743,46 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: locations_pb2.ListLocationsResponse: Response from ListLocations method. """ http_options = _BaseEventarcRestTransport._BaseListLocations._get_http_options() + request, metadata = self._interceptor.pre_list_locations(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseListLocations._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListLocations", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2474,9 +3791,29 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -2484,6 +3821,9 @@ def get_iam_policy(self): return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore class _GetIamPolicy(_BaseEventarcRestTransport._BaseGetIamPolicy, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetIamPolicy") + @staticmethod def _get_response( host, @@ -2510,7 +3850,7 @@ def __call__(self, request: iam_policy_pb2.GetIamPolicyRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> policy_pb2.Policy: r"""Call the get iam policy method over HTTP. 
@@ -2521,20 +3861,46 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: policy_pb2.Policy: Response from GetIamPolicy method. """ http_options = _BaseEventarcRestTransport._BaseGetIamPolicy._get_http_options() + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetIamPolicy", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2543,9 +3909,29 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.GetIamPolicy", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -2553,6 +3939,9 @@ def set_iam_policy(self): return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore class _SetIamPolicy(_BaseEventarcRestTransport._BaseSetIamPolicy, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.SetIamPolicy") + @staticmethod def _get_response( host, @@ -2580,7 +3969,7 @@ def __call__(self, request: iam_policy_pb2.SetIamPolicyRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> policy_pb2.Policy: r"""Call the set iam policy method over 
HTTP. @@ -2591,14 +3980,17 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: policy_pb2.Policy: Response from SetIamPolicy method. """ http_options = _BaseEventarcRestTransport._BaseSetIamPolicy._get_http_options() + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) @@ -2607,6 +3999,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.SetIamPolicy", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2615,9 +4030,29 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.SetIamPolicy", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -2625,6 +4060,9 @@ def test_iam_permissions(self): return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore class _TestIamPermissions(_BaseEventarcRestTransport._BaseTestIamPermissions, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.TestIamPermissions") + @staticmethod def _get_response( host, @@ -2652,7 +4090,7 @@ def __call__(self, request: iam_policy_pb2.TestIamPermissionsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -2663,14 +4101,17 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. """ http_options = _BaseEventarcRestTransport._BaseTestIamPermissions._get_http_options() + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) @@ -2679,6 +4120,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.TestIamPermissions", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2687,9 +4151,29 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.TestIamPermissions", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -2697,6 +4181,9 @@ def cancel_operation(self): return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore class _CancelOperation(_BaseEventarcRestTransport._BaseCancelOperation, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.CancelOperation") + @staticmethod def _get_response( host, @@ -2724,7 +4211,7 @@ def 
__call__(self, request: operations_pb2.CancelOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> None: r"""Call the cancel operation method over HTTP. @@ -2735,11 +4222,14 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = _BaseEventarcRestTransport._BaseCancelOperation._get_http_options() + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) @@ -2748,6 +4238,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CancelOperation", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2763,6 +4276,9 @@ def delete_operation(self): return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore class _DeleteOperation(_BaseEventarcRestTransport._BaseDeleteOperation, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.DeleteOperation") + @staticmethod def _get_response( host, @@ -2789,7 +4305,7 @@ def __call__(self, request: operations_pb2.DeleteOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> None: r"""Call the delete operation method over HTTP. @@ -2800,17 +4316,43 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = _BaseEventarcRestTransport._BaseDeleteOperation._get_http_options() + request, metadata = self._interceptor.pre_delete_operation(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2826,6 +4368,9 @@ def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(_BaseEventarcRestTransport._BaseGetOperation, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetOperation") + @staticmethod def _get_response( host, @@ -2852,7 +4397,7 @@ def __call__(self, request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -2863,20 +4408,46 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. 
""" http_options = _BaseEventarcRestTransport._BaseGetOperation._get_http_options() + request, metadata = self._interceptor.pre_get_operation(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetOperation", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2885,9 +4456,29 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -2895,6 +4486,9 @@ def list_operations(self): return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore class _ListOperations(_BaseEventarcRestTransport._BaseListOperations, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.ListOperations") + @staticmethod def _get_response( host, @@ -2921,7 +4515,7 @@ def __call__(self, request: operations_pb2.ListOperationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -2932,20 +4526,46 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. """ http_options = _BaseEventarcRestTransport._BaseListOperations._get_http_options() + request, metadata = self._interceptor.pre_list_operations(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseListOperations._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListOperations", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = EventarcRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2954,9 +4574,29 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py index 660024e9a4..c2bafca03b 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py @@ -22,7 +22,6 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO -from google.auth import credentials as ga_credentials # type: ignore import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -53,7 +52,7 @@ class _BaseEventarcRestTransport(EventarcTransport): def __init__(self, *, host: str = 'eventarc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, + credentials: Optional[Any] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, 
            url_scheme: str = 'https',
@@ -63,7 +62,7 @@ def __init__(self, *,
         Args:
             host (Optional[str]):
                  The hostname to connect to (default: 'eventarc.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
+            credentials (Optional[Any]): The
                 authorization credentials to attach to requests. These
                 credentials identify the application to the service; if none
                 are specified, the client will attempt to ascertain the
@@ -805,6 +804,8 @@ def _get_query_params_json(transcoded_request):
             return query_params
 
     class _BaseGetLocation:
+        def __hash__(self):  # pragma: NO COVER
+            raise NotImplementedError("__hash__ must be implemented.")
 
         @staticmethod
         def _get_http_options():
@@ -828,6 +829,8 @@ def _get_query_params_json(transcoded_request):
             return query_params
 
     class _BaseListLocations:
+        def __hash__(self):  # pragma: NO COVER
+            raise NotImplementedError("__hash__ must be implemented.")
 
         @staticmethod
         def _get_http_options():
@@ -851,6 +854,8 @@ def _get_query_params_json(transcoded_request):
             return query_params
 
     class _BaseGetIamPolicy:
+        def __hash__(self):  # pragma: NO COVER
+            raise NotImplementedError("__hash__ must be implemented.")
 
         @staticmethod
         def _get_http_options():
@@ -882,6 +887,8 @@ def _get_query_params_json(transcoded_request):
             return query_params
 
     class _BaseSetIamPolicy:
+        def __hash__(self):  # pragma: NO COVER
+            raise NotImplementedError("__hash__ must be implemented.")
 
         @staticmethod
         def _get_http_options():
@@ -920,6 +927,8 @@ def _get_query_params_json(transcoded_request):
             return query_params
 
     class _BaseTestIamPermissions:
+        def __hash__(self):  # pragma: NO COVER
+            raise NotImplementedError("__hash__ must be implemented.")
 
         @staticmethod
         def _get_http_options():
@@ -958,6 +967,8 @@ def _get_query_params_json(transcoded_request):
             return query_params
 
     class _BaseCancelOperation:
+        def __hash__(self):  # pragma: NO COVER
+            raise NotImplementedError("__hash__ must be implemented.")
 
         @staticmethod
         def _get_http_options():
@@ -986,6 +997,8 @@ def _get_query_params_json(transcoded_request):
             return query_params
 
     class _BaseDeleteOperation:
+        def __hash__(self):  # pragma: NO COVER
+            raise NotImplementedError("__hash__ must be implemented.")
 
         @staticmethod
         def _get_http_options():
@@ -1009,6 +1022,8 @@ def _get_query_params_json(transcoded_request):
             return query_params
 
     class _BaseGetOperation:
+        def __hash__(self):  # pragma: NO COVER
+            raise NotImplementedError("__hash__ must be implemented.")
 
         @staticmethod
         def _get_http_options():
@@ -1032,6 +1047,8 @@ def _get_query_params_json(transcoded_request):
             return query_params
 
     class _BaseListOperations:
+        def __hash__(self):  # pragma: NO COVER
+            raise NotImplementedError("__hash__ must be implemented.")
 
         @staticmethod
         def _get_http_options():
diff --git a/tests/integration/goldens/eventarc/noxfile.py b/tests/integration/goldens/eventarc/noxfile.py
index 61f7c5c43d..685faed09b 100755
--- a/tests/integration/goldens/eventarc/noxfile.py
+++ b/tests/integration/goldens/eventarc/noxfile.py
@@ -29,7 +29,8 @@
     "3.9",
     "3.10",
     "3.11",
-    "3.12"
+    "3.12",
+    "3.13",
 ]
 
 CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
@@ -39,7 +40,7 @@
 BLACK_VERSION = "black==22.3.0"
 BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
 
-DEFAULT_PYTHON_VERSION = "3.12"
+DEFAULT_PYTHON_VERSION = "3.13"
 
 nox.sessions = [
     "unit",
@@ -61,7 +62,7 @@
 def unit(session, protobuf_implementation):
     """Run the unit test suite."""
 
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"):
+    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
         session.skip("cpp implementation is not supported in python 3.11+")
 
     session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
@@ -94,7 +95,7 @@ def unit(session, protobuf_implementation):
 def prerelease_deps(session, protobuf_implementation):
     """Run the unit test suite against pre-release versions of dependencies."""
 
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"):
+    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
         session.skip("cpp implementation is not supported in python 3.11+")
 
     # Install test environment dependencies
@@ -129,7 +130,8 @@ def prerelease_deps(session, protobuf_implementation):
         "googleapis-common-protos",
         "google-api-core",
         "google-auth",
-        "grpcio",
+        # Exclude grpcio==1.67.0rc1, which does not support python 3.13
+        "grpcio!=1.67.0rc1",
         "grpcio-status",
         "protobuf",
         "proto-plus",
diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json b/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json
index 596aaecb1b..247771789f 100755
--- a/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json
+++ b/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json
@@ -55,7 +55,7 @@
       },
       {
         "name": "metadata",
-        "type": "Sequence[Tuple[str, str]"
+        "type": "Sequence[Tuple[str, Union[str, bytes]]]"
       }
     ],
     "resultType": "google.api_core.operation_async.AsyncOperation",
@@ -143,7 +143,7 @@
       },
       {
         "name": "metadata",
-        "type": "Sequence[Tuple[str, str]"
+        "type": "Sequence[Tuple[str, Union[str, bytes]]]"
       }
     ],
     "resultType": "google.api_core.operation.Operation",
@@ -232,7 +232,7 @@
       },
       {
         "name": "metadata",
-        "type": "Sequence[Tuple[str, str]"
+        "type": "Sequence[Tuple[str, Union[str, bytes]]]"
      }
     ],
     "resultType": "google.api_core.operation_async.AsyncOperation",
@@ -320,7 +320,7 @@
       },
       {
         "name": "metadata",
-        "type": "Sequence[Tuple[str, str]"
+        "type": "Sequence[Tuple[str, Union[str, bytes]]]"
       }
     ],
     "resultType": "google.api_core.operation.Operation",
@@ -409,7 +409,7 @@
       },
       {
         "name": "metadata",
-        "type": "Sequence[Tuple[str, str]"
+        "type": "Sequence[Tuple[str, Union[str, bytes]]]"
       }
     ],
     "resultType": "google.api_core.operation_async.AsyncOperation",
@@ -497,7 +497,7 @@
       },
       {
         "name": "metadata",
-        "type": "Sequence[Tuple[str, str]"
+        "type": "Sequence[Tuple[str, Union[str, bytes]]]"
       }
     ],
     "resultType": "google.api_core.operation.Operation",
@@ -578,7 +578,7 @@
       },
       {
         "name": "metadata",
-        "type": "Sequence[Tuple[str, str]"
+        "type": "Sequence[Tuple[str, Union[str, bytes]]]"
       }
     ],
     "resultType": "google.api_core.operation_async.AsyncOperation",
@@ -658,7 +658,7 @@
       },
       {
         "name": "metadata",
-        "type": "Sequence[Tuple[str, str]"
+        "type": "Sequence[Tuple[str, Union[str, bytes]]]"
       }
     ],
     "resultType": "google.api_core.operation.Operation",
@@ -739,7 +739,7 @@
       },
       {
         "name": "metadata",
-        "type": "Sequence[Tuple[str, str]"
+        "type": "Sequence[Tuple[str, Union[str, bytes]]]"
       }
     ],
     "resultType": "google.api_core.operation_async.AsyncOperation",
@@ -819,7 +819,7 @@
       },
       {
         "name": "metadata",
-        "type": "Sequence[Tuple[str, str]"
+        "type": "Sequence[Tuple[str, Union[str, bytes]]]"
       }
     ],
     "resultType": "google.api_core.operation.Operation",
@@ -904,7 +904,7 @@
       },
       {
         "name": "metadata",
-        "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -988,7 +988,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -1069,7 +1069,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.ChannelConnection", @@ -1149,7 +1149,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.ChannelConnection", @@ -1230,7 +1230,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.Channel", @@ -1310,7 +1310,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.Channel", @@ -1391,7 +1391,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", @@ -1471,7 +1471,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", @@ -1552,7 +1552,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.Provider", @@ -1632,7 +1632,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.Provider", @@ -1713,7 +1713,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.Trigger", @@ -1793,7 +1793,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.Trigger", @@ -1874,7 +1874,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsAsyncPager", @@ -1954,7 +1954,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsPager", @@ -2035,7 +2035,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsAsyncPager", @@ -2115,7 +2115,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsPager", @@ -2196,7 +2196,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersAsyncPager", @@ -2276,7 +2276,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersPager", @@ -2357,7 +2357,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager", @@ -2437,7 +2437,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager", @@ -2522,7 +2522,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -2606,7 +2606,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -2691,7 +2691,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", @@ -2775,7 +2775,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", @@ -2864,7 +2864,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -2952,7 +2952,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", diff --git a/tests/integration/goldens/eventarc/setup.py b/tests/integration/goldens/eventarc/setup.py index b930a775e4..b54372e5e2 100755 --- a/tests/integration/goldens/eventarc/setup.py +++ b/tests/integration/goldens/eventarc/setup.py @@ -44,9 +44,12 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] +extras = { +} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -82,6 +85,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -89,6 +93,7 @@ packages=packages, python_requires=">=3.7", install_requires=dependencies, + extras_require=extras, include_package_data=True, zip_safe=False, ) diff --git a/tests/integration/goldens/eventarc/testing/constraints-3.13.txt b/tests/integration/goldens/eventarc/testing/constraints-3.13.txt new file mode 100755 index 0000000000..ad3f0fa58e --- /dev/null +++ b/tests/integration/goldens/eventarc/testing/constraints-3.13.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index c324c8f257..9f7166403f 100755 --- a/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -23,7 +23,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -88,6 +88,13 @@ } CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -881,25 +888,6 @@ def test_get_trigger(request_type, transport: str = 'grpc'): assert response.etag == 'etag_value' -def test_get_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetTriggerRequest() - - def test_get_trigger_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -959,33 +947,6 @@ def test_get_trigger_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_trigger_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
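[Editor's note] The `mock_async_gen` helper added near the top of test_eventarc.py (alongside the new `AsyncIterable` import) lets tests feed an async iterable to code paths that stream REST response bodies. As written it advances its index by 1, so chunks are only non-overlapping at the default `chunk_size=1`, which is the only way the tests use it. Here is a sketch of how such a helper is consumed; the `chunk_size` step below is an illustrative generalization, not what the golden file does.

```python
import asyncio

async def mock_async_gen(data: str, chunk_size: int = 1):
    # Stepping by chunk_size keeps chunks disjoint for any size; the golden
    # helper steps by 1 and is only ever called with chunk_size=1.
    for i in range(0, len(data), chunk_size):
        yield data[i : i + chunk_size].encode("utf-8")

async def consume() -> bytes:
    # Typical test usage: patch a streaming response's iterator with the
    # generator, then reassemble the body and parse it.
    return b"".join([chunk async for chunk in mock_async_gen('{"name": "op"}', 4)])

print(asyncio.run(consume()))  # b'{"name": "op"}'
```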
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - channel='channel_value', - etag='etag_value', - )) - await client.get_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetTriggerRequest() - - @pytest.mark.asyncio async def test_get_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1064,7 +1025,6 @@ async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=e async def test_get_trigger_async_from_dict(): await test_get_trigger_async(request_type=dict) - def test_get_trigger_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1247,25 +1207,6 @@ def test_list_triggers(request_type, transport: str = 'grpc'): assert response.unreachable == ['unreachable_value'] -def test_list_triggers_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_triggers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListTriggersRequest() - - def test_list_triggers_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1331,30 +1272,6 @@ def test_list_triggers_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_triggers_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_triggers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListTriggersRequest() - - @pytest.mark.asyncio async def test_list_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1427,7 +1344,6 @@ async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type async def test_list_triggers_async_from_dict(): await test_list_triggers_async(request_type=dict) - def test_list_triggers_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1801,25 +1717,6 @@ def test_create_trigger(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_create_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateTriggerRequest() - - def test_create_trigger_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1886,29 +1783,6 @@ def test_create_trigger_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_trigger_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateTriggerRequest() - - @pytest.mark.asyncio async def test_create_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1983,7 +1857,6 @@ async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_typ async def test_create_trigger_async_from_dict(): await test_create_trigger_async(request_type=dict) - def test_create_trigger_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2183,25 +2056,6 @@ def test_update_trigger(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_update_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateTriggerRequest() - - def test_update_trigger_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2264,29 +2118,6 @@ def test_update_trigger_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_trigger_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateTriggerRequest() - - @pytest.mark.asyncio async def test_update_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2361,7 +2192,6 @@ async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_typ async def test_update_trigger_async_from_dict(): await test_update_trigger_async(request_type=dict) - def test_update_trigger_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2561,25 +2391,6 @@ def test_delete_trigger(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_delete_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteTriggerRequest() - - def test_delete_trigger_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2646,29 +2457,6 @@ def test_delete_trigger_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_trigger_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteTriggerRequest() - - @pytest.mark.asyncio async def test_delete_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2743,7 +2531,6 @@ async def test_delete_trigger_async(transport: str = 'grpc_asyncio', request_typ async def test_delete_trigger_async_from_dict(): await test_delete_trigger_async(request_type=dict) - def test_delete_trigger_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2947,25 +2734,6 @@ def test_get_channel(request_type, transport: str = 'grpc'): assert response.crypto_key_name == 'crypto_key_name_value' -def test_get_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelRequest() - - def test_get_channel_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3025,34 +2793,6 @@ def test_get_channel_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_channel_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( - name='name_value', - uid='uid_value', - provider='provider_value', - state=channel.Channel.State.PENDING, - activation_token='activation_token_value', - crypto_key_name='crypto_key_name_value', - )) - await client.get_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelRequest() - - @pytest.mark.asyncio async def test_get_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3133,7 +2873,6 @@ async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=e async def test_get_channel_async_from_dict(): await test_get_channel_async(request_type=dict) - def test_get_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3316,25 +3055,6 @@ def test_list_channels(request_type, transport: str = 'grpc'): assert response.unreachable == ['unreachable_value'] -def test_list_channels_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_channels() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelsRequest() - - def test_list_channels_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3398,30 +3118,6 @@ def test_list_channels_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_channels_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_channels() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelsRequest() - - @pytest.mark.asyncio async def test_list_channels_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3494,7 +3190,6 @@ async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type async def test_list_channels_async_from_dict(): await test_list_channels_async(request_type=dict) - def test_list_channels_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3868,39 +3563,20 @@ def test_create_channel(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_create_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_create_channel_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelRequest() - - -def test_create_channel_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = eventarc.CreateChannelRequest( - parent='parent_value', - channel_id='channel_id_value', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.CreateChannelRequest( + parent='parent_value', + channel_id='channel_id_value', ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3953,29 +3629,6 @@ def test_create_channel_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_channel_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelRequest() - - @pytest.mark.asyncio async def test_create_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4050,7 +3703,6 @@ async def test_create_channel_async(transport: str = 'grpc_asyncio', request_typ async def test_create_channel_async_from_dict(): await test_create_channel_async(request_type=dict) - def test_create_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4250,25 +3902,6 @@ def test_update_channel(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_update_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateChannelRequest() - - def test_update_channel_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4331,29 +3964,6 @@ def test_update_channel_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_channel_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateChannelRequest() - - @pytest.mark.asyncio async def test_update_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4428,7 +4038,6 @@ async def test_update_channel_async(transport: str = 'grpc_asyncio', request_typ async def test_update_channel_async_from_dict(): await test_update_channel_async(request_type=dict) - def test_update_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4618,25 +4227,6 @@ def test_delete_channel(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_delete_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelRequest() - - def test_delete_channel_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4701,29 +4291,6 @@ def test_delete_channel_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_channel_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelRequest() - - @pytest.mark.asyncio async def test_delete_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4798,7 +4365,6 @@ async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_typ async def test_delete_channel_async_from_dict(): await test_delete_channel_async(request_type=dict) - def test_delete_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4983,25 +4549,6 @@ def test_get_provider(request_type, transport: str = 'grpc'): assert response.display_name == 'display_name_value' -def test_get_provider_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_provider() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetProviderRequest() - - def test_get_provider_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5061,30 +4608,6 @@ def test_get_provider_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_provider_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider( - name='name_value', - display_name='display_name_value', - )) - await client.get_provider() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetProviderRequest() - - @pytest.mark.asyncio async def test_get_provider_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5157,7 +4680,6 @@ async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type= async def test_get_provider_async_from_dict(): await test_get_provider_async(request_type=dict) - def test_get_provider_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5340,25 +4862,6 @@ def test_list_providers(request_type, transport: str = 'grpc'): assert response.unreachable == ['unreachable_value'] -def test_list_providers_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_providers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListProvidersRequest() - - def test_list_providers_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5424,30 +4927,6 @@ def test_list_providers_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_providers_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_providers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListProvidersRequest() - - @pytest.mark.asyncio async def test_list_providers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5520,7 +4999,6 @@ async def test_list_providers_async(transport: str = 'grpc_asyncio', request_typ async def test_list_providers_async_from_dict(): await test_list_providers_async(request_type=dict) - def test_list_providers_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5903,25 +5381,6 @@ def test_get_channel_connection(request_type, transport: str = 'grpc'): assert response.activation_token == 'activation_token_value' -def test_get_channel_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_channel_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelConnectionRequest() - - def test_get_channel_connection_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5981,32 +5440,6 @@ def test_get_channel_connection_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_channel_connection_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection( - name='name_value', - uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', - )) - await client.get_channel_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelConnectionRequest() - - @pytest.mark.asyncio async def test_get_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6083,7 +5516,6 @@ async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', req async def test_get_channel_connection_async_from_dict(): await test_get_channel_connection_async(request_type=dict) - def test_get_channel_connection_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6266,25 +5698,6 @@ def test_list_channel_connections(request_type, transport: str = 'grpc'): assert response.unreachable == ['unreachable_value'] -def test_list_channel_connections_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_channel_connections() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelConnectionsRequest() - - def test_list_channel_connections_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6346,30 +5759,6 @@ def test_list_channel_connections_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_channel_connections_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_channel_connections() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelConnectionsRequest() - - @pytest.mark.asyncio async def test_list_channel_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6442,7 +5831,6 @@ async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', r async def test_list_channel_connections_async_from_dict(): await test_list_channel_connections_async(request_type=dict) - def test_list_channel_connections_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6816,28 +6204,9 @@ def test_create_channel_connection(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_create_channel_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_channel_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelConnectionRequest() - - -def test_create_channel_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. +def test_create_channel_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport='grpc', @@ -6901,29 +6270,6 @@ def test_create_channel_connection_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_channel_connection_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_channel_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelConnectionRequest() - - @pytest.mark.asyncio async def test_create_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6998,7 +6344,6 @@ async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', async def test_create_channel_connection_async_from_dict(): await test_create_channel_connection_async(request_type=dict) - def test_create_channel_connection_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7198,25 +6543,6 @@ def test_delete_channel_connection(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_delete_channel_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_channel_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelConnectionRequest() - - def test_delete_channel_connection_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7281,29 +6607,6 @@ def test_delete_channel_connection_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_channel_connection_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_channel_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelConnectionRequest() - - @pytest.mark.asyncio async def test_delete_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7378,7 +6681,6 @@ async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', async def test_delete_channel_connection_async_from_dict(): await test_delete_channel_connection_async(request_type=dict) - def test_delete_channel_connection_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7563,25 +6865,6 @@ def test_get_google_channel_config(request_type, transport: str = 'grpc'): assert response.crypto_key_name == 'crypto_key_name_value' -def test_get_google_channel_config_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_google_channel_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetGoogleChannelConfigRequest() - - def test_get_google_channel_config_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7641,30 +6924,6 @@ def test_get_google_channel_config_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_google_channel_config_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - )) - await client.get_google_channel_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetGoogleChannelConfigRequest() - - @pytest.mark.asyncio async def test_get_google_channel_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7737,7 +6996,6 @@ async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', async def test_get_google_channel_config_async_from_dict(): await test_get_google_channel_config_async(request_type=dict) - def test_get_google_channel_config_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7920,25 +7178,6 @@ def test_update_google_channel_config(request_type, transport: str = 'grpc'): assert response.crypto_key_name == 'crypto_key_name_value' -def test_update_google_channel_config_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_google_channel_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() - - def test_update_google_channel_config_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7996,30 +7235,6 @@ def test_update_google_channel_config_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_google_channel_config_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - )) - await client.update_google_channel_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() - - @pytest.mark.asyncio async def test_update_google_channel_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8092,7 +7307,6 @@ async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio async def test_update_google_channel_config_async_from_dict(): await test_update_google_channel_config_async(request_type=dict) - def test_update_google_channel_config_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8248,50 +7462,6 @@ async def test_update_google_channel_config_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [ - eventarc.GetTriggerRequest, - dict, -]) -def test_get_trigger_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - channel='channel_value', - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = trigger.Trigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, trigger.Trigger) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.service_account == 'service_account_value' - assert response.channel == 'channel_value' - assert response.etag == 'etag_value' - def test_get_trigger_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8387,6 +7557,7 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_trigger(request) @@ -8403,66 +7574,6 @@ def test_get_trigger_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_trigger_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = trigger.Trigger.to_json(trigger.Trigger()) - - request = eventarc.GetTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = trigger.Trigger() - - client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetTriggerRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_trigger(request) - - def test_get_trigger_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8491,6 +7602,7 @@ def test_get_trigger_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_trigger(**mock_args) @@ -8516,66 +7628,21 @@ def test_get_trigger_rest_flattened_error(transport: str = 'rest'): ) -def test_get_trigger_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - +def test_list_triggers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -@pytest.mark.parametrize("request_type", [ - eventarc.ListTriggersRequest, - dict, -]) -def test_list_triggers_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_triggers(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTriggersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - -def test_list_triggers_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_triggers in client._transport._wrapped_methods + # Ensure method has been cached + assert client._transport.list_triggers in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -8658,6 +7725,7 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_triggers(request) @@ -8674,66 +7742,6 @@ def test_list_triggers_rest_unset_required_fields(): assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_triggers_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse()) - - request = eventarc.ListTriggersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = eventarc.ListTriggersResponse() - - client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_triggers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListTriggersRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_triggers(request) - - def test_list_triggers_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8762,6 +7770,7 @@ def test_list_triggers_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_triggers(**mock_args) @@ -8849,101 +7858,6 @@ def test_list_triggers_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.CreateTriggerRequest, - dict, -]) -def test_create_trigger_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.CreateTriggerRequest.meta.fields["trigger"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["trigger"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["trigger"][field])): - del request_init["trigger"][field][i][subfield] - else: - del request_init["trigger"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_trigger(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_create_trigger_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9057,6 +7971,7 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_trigger(request) @@ -9081,67 +7996,6 @@ def test_create_trigger_rest_unset_required_fields(): assert set(unset_fields) == (set(("triggerId", "validateOnly", )) & set(("parent", "trigger", "triggerId", "validateOnly", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_trigger_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.CreateTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateTriggerRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_trigger(request) - - def test_create_trigger_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9170,6 +8024,7 @@ def test_create_trigger_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_trigger(**mock_args) @@ -9197,120 +8052,18 @@ def test_create_trigger_rest_flattened_error(transport: str = 'rest'): ) -def test_create_trigger_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) +def test_update_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateTriggerRequest, - dict, -]) -def test_update_trigger_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} - request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.UpdateTriggerRequest.meta.fields["trigger"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["trigger"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["trigger"][field])): - del request_init["trigger"][field][i][subfield] - else: - del request_init["trigger"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_trigger(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - -def test_update_trigger_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.update_trigger in client._transport._wrapped_methods @@ -9401,6 +8154,7 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_trigger(request) @@ -9421,67 +8175,6 @@ def test_update_trigger_rest_unset_required_fields(): assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("validateOnly", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_trigger_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.UpdateTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateTriggerRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_trigger(request) - - def test_update_trigger_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9510,6 +8203,7 @@ def test_update_trigger_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_trigger(**mock_args) @@ -9537,44 +8231,6 @@ def test_update_trigger_rest_flattened_error(transport: str = 'rest'): ) -def test_update_trigger_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.DeleteTriggerRequest, - dict, -]) -def test_delete_trigger_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_trigger(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_delete_trigger_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9680,6 +8336,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_trigger(request) @@ -9700,67 +8357,6 @@ def test_delete_trigger_rest_unset_required_fields(): assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", "validateOnly", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_trigger_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.DeleteTriggerRequest.pb(eventarc.DeleteTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.DeleteTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteTriggerRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_trigger(request) - - def test_delete_trigger_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9788,6 +8384,7 @@ def test_delete_trigger_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_trigger(**mock_args) @@ -9814,72 +8411,18 @@ def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): ) -def test_delete_trigger_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) +def test_get_channel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - -@pytest.mark.parametrize("request_type", [ - eventarc.GetChannelRequest, - dict, -]) -def test_get_channel_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = channel.Channel( - name='name_value', - uid='uid_value', - provider='provider_value', - state=channel.Channel.State.PENDING, - activation_token='activation_token_value', - crypto_key_name='crypto_key_name_value', - pubsub_topic='pubsub_topic_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = channel.Channel.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_channel(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, channel.Channel) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.provider == 'provider_value' - assert response.state == channel.Channel.State.PENDING - assert response.activation_token == 'activation_token_value' - assert response.crypto_key_name == 'crypto_key_name_value' - -def test_get_channel_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.get_channel in client._transport._wrapped_methods @@ -9963,6 +8506,7 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_channel(request) @@ -9979,66 +8523,6 @@ def test_get_channel_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_channel_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.GetChannelRequest.pb(eventarc.GetChannelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = channel.Channel.to_json(channel.Channel()) - - request = eventarc.GetChannelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = channel.Channel() - - client.get_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetChannelRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_channel(request) - - def test_get_channel_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10067,6 +8551,7 @@ def test_get_channel_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_channel(**mock_args) @@ -10092,51 +8577,6 @@ def test_get_channel_rest_flattened_error(transport: str = 'rest'): ) -def test_get_channel_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.ListChannelsRequest, - dict, -]) -def test_list_channels_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListChannelsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = eventarc.ListChannelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_channels(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListChannelsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - def test_list_channels_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10234,6 +8674,7 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_channels(request) @@ -10250,66 +8691,6 @@ def test_list_channels_rest_unset_required_fields(): assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_channels_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channels") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.ListChannelsRequest.pb(eventarc.ListChannelsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = eventarc.ListChannelsResponse.to_json(eventarc.ListChannelsResponse()) - - request = eventarc.ListChannelsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = eventarc.ListChannelsResponse() - - client.list_channels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_channels_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListChannelsRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_channels(request) - - def test_list_channels_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10338,6 +8719,7 @@ def test_list_channels_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_channels(**mock_args) @@ -10425,127 +8807,32 @@ def test_list_channels_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.CreateChannelRequest, - dict, -]) -def test_create_channel_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 +def test_create_channel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.CreateChannelRequest.meta.fields["channel"] + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Ensure method has been cached + assert client._transport.create_channel_ in client._transport._wrapped_methods - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.create_channel_] = mock_rpc - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + request = {} + client.create_channel(request) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["channel"][field])): - del request_init["channel"][field][i][subfield] - else: - del request_init["channel"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_channel(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - -def test_create_channel_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_channel_ in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.create_channel_] = mock_rpc - - request = {} - client.create_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 # Operation methods build a cached wrapper on first rpc call # subsequent calls should use the cached wrapper @@ -10633,6 +8920,7 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_channel(request) @@ -10657,67 +8945,6 @@ def test_create_channel_rest_unset_required_fields(): assert set(unset_fields) == (set(("channelId", "validateOnly", )) & set(("parent", "channel", "channelId", "validateOnly", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_channel_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.CreateChannelRequest.pb(eventarc.CreateChannelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.CreateChannelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateChannelRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_channel(request) - - def test_create_channel_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10746,6 +8973,7 @@ def test_create_channel_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_channel(**mock_args) @@ -10773,108 +9001,6 @@ def test_create_channel_rest_flattened_error(transport: str = 'rest'): ) -def test_create_channel_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateChannelRequest, - dict, -]) -def test_update_channel_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} - request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.UpdateChannelRequest.meta.fields["channel"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["channel"][field])): - del request_init["channel"][field][i][subfield] - else: - del request_init["channel"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_channel(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_update_channel_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10977,6 +9103,7 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_channel(request) @@ -10997,77 +9124,16 @@ def test_update_channel_rest_unset_required_fields(): assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("validateOnly", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_channel_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( +def test_update_channel_rest_flattened(): + client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_channel") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.UpdateChannelRequest.pb(eventarc.UpdateChannelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + transport="rest", + ) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.UpdateChannelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateChannelRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_channel(request) - - -def test_update_channel_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
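The *_use_cached_wrapped_rpc tests in this hunk all share one shape: wrapping must happen once at client construction, and later calls must reuse the cached wrapper rather than re-wrap. Condensed from the surrounding tests (the empty request dict is what the generated tests pass):

    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = EventarcClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )
        assert wrapper_fn.call_count > 0      # wrapped eagerly, at construction
        wrapper_fn.reset_mock()
        # Swap the cached wrapper for a mock, then call twice.
        mock_rpc = mock.Mock()
        client._transport._wrapped_methods[client._transport.update_channel] = mock_rpc
        client.update_channel({})
        client.update_channel({})
        assert mock_rpc.call_count == 2       # the cache served both calls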
- return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') # get arguments that satisfy an http rule for this method sample_request = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} @@ -11085,6 +9151,7 @@ def test_update_channel_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_channel(**mock_args) @@ -11111,44 +9178,6 @@ def test_update_channel_rest_flattened_error(transport: str = 'rest'): ) -def test_update_channel_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.DeleteChannelRequest, - dict, -]) -def test_delete_channel_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_channel(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_delete_channel_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11254,6 +9283,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_channel(request) @@ -11274,67 +9304,6 @@ def test_delete_channel_rest_unset_required_fields(): assert set(unset_fields) == (set(("validateOnly", )) & set(("name", "validateOnly", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_channel_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.DeleteChannelRequest.pb(eventarc.DeleteChannelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.DeleteChannelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteChannelRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
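The deleted *_rest_interceptors tests covered the EventarcRestInterceptor hook points; the hooks themselves remain part of the transport surface. A sketch of wiring one up, with hook names taken from the removed tests and illustrative no-op bodies:

    class AuditInterceptor(transports.EventarcRestInterceptor):
        def pre_delete_channel(self, request, metadata):
            # Runs before the HTTP call; may rewrite the request or metadata.
            return request, metadata

        def post_delete_channel(self, response):
            # Runs after the HTTP call, on the raw operation.
            return response

    transport = transports.EventarcRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=AuditInterceptor(),
    )
    client = EventarcClient(transport=transport)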
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_channel(request) - - def test_delete_channel_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11361,6 +9330,7 @@ def test_delete_channel_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_channel(**mock_args) @@ -11386,51 +9356,6 @@ def test_delete_channel_rest_flattened_error(transport: str = 'rest'): ) -def test_delete_channel_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.GetProviderRequest, - dict, -]) -def test_get_provider_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = discovery.Provider( - name='name_value', - display_name='display_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = discovery.Provider.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_provider(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, discovery.Provider) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - def test_get_provider_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11526,6 +9451,7 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_provider(request) @@ -11542,66 +9468,6 @@ def test_get_provider_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_provider_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_provider") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.GetProviderRequest.pb(eventarc.GetProviderRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = discovery.Provider.to_json(discovery.Provider()) - - request = eventarc.GetProviderRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = discovery.Provider() - - client.get_provider(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_provider_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetProviderRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
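Every deleted *_rest_bad_request test reduced to the same assertion: an HTTP 400 from the session must surface as core_exceptions.BadRequest. The pattern, condensed (assuming `client` is an EventarcClient on the REST transport and `request` is a valid request message):

    with mock.patch.object(Session, 'request') as req, \
            pytest.raises(core_exceptions.BadRequest):
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_provider(request)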
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_provider(request) - - def test_get_provider_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11630,6 +9496,7 @@ def test_get_provider_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_provider(**mock_args) @@ -11655,71 +9522,26 @@ def test_get_provider_rest_flattened_error(transport: str = 'rest'): ) -def test_get_provider_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) +def test_list_providers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -@pytest.mark.parametrize("request_type", [ - eventarc.ListProvidersRequest, - dict, -]) -def test_list_providers_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Ensure method has been cached + assert client._transport.list_providers in client._transport._wrapped_methods - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListProvidersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = eventarc.ListProvidersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_providers(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListProvidersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - -def test_list_providers_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_providers in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_providers] = mock_rpc + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_providers] = mock_rpc request = {} client.list_providers(request) @@ -11797,6 +9619,7 @@ def test_list_providers_rest_required_fields(request_type=eventarc.ListProviders response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_providers(request) @@ -11813,66 +9636,6 @@ def test_list_providers_rest_unset_required_fields(): assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_providers_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_providers") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.ListProvidersRequest.pb(eventarc.ListProvidersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = eventarc.ListProvidersResponse.to_json(eventarc.ListProvidersResponse()) - - request = eventarc.ListProvidersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = eventarc.ListProvidersResponse() - - client.list_providers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_providers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListProvidersRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # 
send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_providers(request) - - def test_list_providers_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11901,6 +9664,7 @@ def test_list_providers_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_providers(**mock_args) @@ -11988,48 +9752,6 @@ def test_list_providers_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.GetChannelConnectionRequest, - dict, -]) -def test_get_channel_connection_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = channel_connection.ChannelConnection( - name='name_value', - uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = channel_connection.ChannelConnection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_channel_connection(request) - - # Establish that the response is the type that we expect. 
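The surviving pager test exercises transparent pagination: list_providers returns a pager that refetches pages whenever iteration crosses a next_page_token boundary. Usage, sketched with the sample resource names used above:

    # Item iteration triggers additional HTTP calls behind the scenes...
    pager = client.list_providers(parent='projects/sample1/locations/sample2')
    providers = list(pager)
    # ...or walk page by page and inspect the raw token:
    for page_ in client.list_providers(parent='projects/sample1/locations/sample2').pages:
        token = page_.raw_page.next_page_token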
- assert isinstance(response, channel_connection.ChannelConnection) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.channel == 'channel_value' - assert response.activation_token == 'activation_token_value' - def test_get_channel_connection_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12125,6 +9847,7 @@ def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetCh response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_channel_connection(request) @@ -12141,66 +9864,6 @@ def test_get_channel_connection_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_channel_connection_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.GetChannelConnectionRequest.pb(eventarc.GetChannelConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = channel_connection.ChannelConnection.to_json(channel_connection.ChannelConnection()) - - request = eventarc.GetChannelConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = channel_connection.ChannelConnection() - - client.get_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetChannelConnectionRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_channel_connection(request) - - def test_get_channel_connection_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12229,6 +9892,7 @@ def test_get_channel_connection_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_channel_connection(**mock_args) @@ -12254,51 +9918,6 @@ def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): ) -def test_get_channel_connection_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.ListChannelConnectionsRequest, - dict, -]) -def test_list_channel_connections_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListChannelConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_channel_connections(request) - - # Establish that the response is the type that we expect. 
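The *_rest_flattened tests exercise the convenience signature, where fields passed as keyword arguments are folded into the request message, while the matching *_flattened_error tests pin down that mixing a request object with flattened fields raises ValueError. Roughly:

    # Flattened: the client builds GetChannelConnectionRequest itself.
    client.get_channel_connection(
        name='projects/sample1/locations/sample2/channelConnections/sample3',
    )
    # Mixing the two styles is rejected.
    with pytest.raises(ValueError):
        client.get_channel_connection(
            eventarc.GetChannelConnectionRequest(),
            name='projects/sample1/locations/sample2/channelConnections/sample3',
        )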
- assert isinstance(response, pagers.ListChannelConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - def test_list_channel_connections_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12396,6 +10015,7 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_channel_connections(request) @@ -12412,76 +10032,16 @@ def test_list_channel_connections_rest_unset_required_fields(): assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_channel_connections_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( +def test_list_channel_connections_rest_flattened(): + client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channel_connections") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.ListChannelConnectionsRequest.pb(eventarc.ListChannelConnectionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + transport="rest", + ) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = eventarc.ListChannelConnectionsResponse.to_json(eventarc.ListChannelConnectionsResponse()) - - request = eventarc.ListChannelConnectionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = eventarc.ListChannelConnectionsResponse() - - client.list_channel_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_channel_connections_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListChannelConnectionsRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_channel_connections(request) - - -def test_list_channel_connections_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListChannelConnectionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelConnectionsResponse() # get arguments that satisfy an http rule for this method sample_request = {'parent': 'projects/sample1/locations/sample2'} @@ -12500,6 +10060,7 @@ def test_list_channel_connections_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_channel_connections(**mock_args) @@ -12587,101 +10148,6 @@ def test_list_channel_connections_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.CreateChannelConnectionRequest, - dict, -]) -def test_create_channel_connection_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'activation_token': 'activation_token_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel_connection"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["channel_connection"][field])): - del request_init["channel_connection"][field][i][subfield] - else: - del request_init["channel_connection"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_channel_connection(request) - - # Establish that the response is the type that we expect. 
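The prologue being deleted in this hunk existed to tolerate version skew between generation time and test time (issue #1748): sample sub-fields unknown to the installed proto runtime were pruned before the request was built. Its essence, demonstrated on a plain dict with a hypothetical allow-list:

    runtime_nested_fields = {("channel_connection", "uid")}      # hypothetical
    request_init = {"channel_connection": {"uid": "uid_value", "retired_field": "x"}}
    for field, value in request_init.items():
        if isinstance(value, dict):
            for subfield in list(value):
                if (field, subfield) not in runtime_nested_fields:
                    del value[subfield]   # drop what the runtime no longer knows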
- assert response.operation.name == "operations/spam" - def test_create_channel_connection_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12788,6 +10254,7 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_channel_connection(request) @@ -12808,67 +10275,6 @@ def test_create_channel_connection_rest_unset_required_fields(): assert set(unset_fields) == (set(("channelConnectionId", )) & set(("parent", "channelConnection", "channelConnectionId", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_channel_connection_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.CreateChannelConnectionRequest.pb(eventarc.CreateChannelConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.CreateChannelConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateChannelConnectionRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_channel_connection(request) - - def test_create_channel_connection_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12897,6 +10303,7 @@ def test_create_channel_connection_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_channel_connection(**mock_args) @@ -12924,44 +10331,6 @@ def test_create_channel_connection_rest_flattened_error(transport: str = 'rest') ) -def test_create_channel_connection_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.DeleteChannelConnectionRequest, - dict, -]) -def test_delete_channel_connection_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_channel_connection(request) - - # Establish that the response is the type that we expect. 
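Long-running methods such as create_channel_connection come back as a future wrapping the raw operation, which is why the deleted monolithic tests asserted on response.operation.name rather than on a resource. Assuming the mocked session returns Operation(name='operations/spam') as in the pattern above:

    response = client.create_channel_connection(request)
    assert response.operation.name == 'operations/spam'
    # A real caller would then block on the future, e.g. response.result().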
- assert response.operation.name == "operations/spam" - def test_delete_channel_connection_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13058,6 +10427,7 @@ def test_delete_channel_connection_rest_required_fields(request_type=eventarc.De response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_channel_connection(request) @@ -13074,71 +10444,10 @@ def test_delete_channel_connection_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_channel_connection_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.DeleteChannelConnectionRequest.pb(eventarc.DeleteChannelConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.DeleteChannelConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteChannelConnectionRequest): +def test_delete_channel_connection_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_channel_connection(request) - - -def test_delete_channel_connection_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="rest", ) # Mock the http request call within the method and fake a response. 
@@ -13161,6 +10470,7 @@ def test_delete_channel_connection_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_channel_connection(**mock_args) @@ -13186,51 +10496,6 @@ def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest') ) -def test_delete_channel_connection_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.GetGoogleChannelConfigRequest, - dict, -]) -def test_get_google_channel_config_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_google_channel_config(request) - - # Establish that the response is the type that we expect. 
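Unary responses here are proto-plus messages, so the fakes first convert to the underlying protobuf class before JSON-encoding the payload that the mocked Response will carry. The round-trip, condensed (assuming `response_value` is the mocked Response from the pattern above):

    return_value = google_channel_config.GoogleChannelConfig(name='name_value')
    # proto-plus wrapper -> raw pb2 message -> canonical JSON
    return_value = google_channel_config.GoogleChannelConfig.pb(return_value)
    json_return_value = json_format.MessageToJson(return_value)
    response_value._content = json_return_value.encode('UTF-8')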
- assert isinstance(response, google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' - def test_get_google_channel_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13326,6 +10591,7 @@ def test_get_google_channel_config_rest_required_fields(request_type=eventarc.Ge response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_google_channel_config(request) @@ -13342,66 +10608,6 @@ def test_get_google_channel_config_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_google_channel_config_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_google_channel_config") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.GetGoogleChannelConfigRequest.pb(eventarc.GetGoogleChannelConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = google_channel_config.GoogleChannelConfig.to_json(google_channel_config.GoogleChannelConfig()) - - request = eventarc.GetGoogleChannelConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = google_channel_config.GoogleChannelConfig() - - client.get_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_google_channel_config_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetGoogleChannelConfigRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_google_channel_config(request) - - def test_get_google_channel_config_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13430,6 +10636,7 @@ def test_get_google_channel_config_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_google_channel_config(**mock_args) @@ -13455,115 +10662,6 @@ def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest') ) -def test_get_google_channel_config_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateGoogleChannelConfigRequest, - dict, -]) -def test_update_google_channel_config_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} - request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
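One detail of the helper deleted in this hunk is easy to miss: it tells proto-plus composites apart from raw protobuf ones purely by probing for a descriptor, since proto-plus classes keep field metadata on `.meta.fields` while pb2 classes expose `DESCRIPTOR.fields`. The removed body below hangs on that single check:

    # True for proto-plus composites, False for raw pb2 messages.
    is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")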
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["google_channel_config"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["google_channel_config"][field])): - del request_init["google_channel_config"][field][i][subfield] - else: - del request_init["google_channel_config"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_google_channel_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' - def test_update_google_channel_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13657,6 +10755,7 @@ def test_update_google_channel_config_rest_required_fields(request_type=eventarc response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_google_channel_config(request) @@ -13673,70 +10772,10 @@ def test_update_google_channel_config_rest_unset_required_fields(): assert set(unset_fields) == (set(("updateMask", )) & set(("googleChannelConfig", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_google_channel_config_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_google_channel_config") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.UpdateGoogleChannelConfigRequest.pb(eventarc.UpdateGoogleChannelConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = gce_google_channel_config.GoogleChannelConfig.to_json(gce_google_channel_config.GoogleChannelConfig()) - - request = eventarc.UpdateGoogleChannelConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gce_google_channel_config.GoogleChannelConfig() - - client.update_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_google_channel_config_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateGoogleChannelConfigRequest): +def test_update_google_channel_config_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_google_channel_config(request) - - -def test_update_google_channel_config_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="rest", ) # Mock the http request call within the method and fake a response. @@ -13762,6 +10801,7 @@ def test_update_google_channel_config_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_google_channel_config(**mock_args) @@ -13788,13 +10828,6 @@ def test_update_google_channel_config_rest_flattened_error(transport: str = 'res ) -def test_update_google_channel_config_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.EventarcGrpcTransport( @@ -13889,1277 +10922,4947 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" -def test_transport_kind_grpc_asyncio(): - transport = EventarcAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() +def test_initialize_client_w_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" ) - assert transport.kind == "grpc_asyncio" + assert client is not None -def test_transport_kind_rest(): - transport = EventarcClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_trigger_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - assert transport.kind == "rest" + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + call.return_value = trigger.Trigger() + client.get_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetTriggerRequest() + + assert args[0] == request_msg -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_triggers_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - assert isinstance( - client.transport, - transports.EventarcGrpcTransport, + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + call.return_value = eventarc.ListTriggersResponse() + client.list_triggers(request=None) + + # Establish that the underlying stub method was called. 
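+    # A note on the assertion pattern used by these empty-call tests:
+    # each mock_calls entry unpacks to (name, args, kwargs), so args[0] below
+    # is the request proto the client actually built; with request=None it
+    # should equal a default-constructed eventarc.ListTriggersRequest.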
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListTriggersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_trigger_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) -def test_eventarc_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.EventarcTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_trigger(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateTriggerRequest() -def test_eventarc_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.EventarcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + assert args[0] == request_msg - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'get_trigger', - 'list_triggers', - 'create_trigger', - 'update_trigger', - 'delete_trigger', - 'get_channel', - 'list_channels', - 'create_channel_', - 'update_channel', - 'delete_channel', - 'get_provider', - 'list_providers', - 'get_channel_connection', - 'list_channel_connections', - 'create_channel_connection', - 'delete_channel_connection', - 'get_google_channel_config', - 'update_google_channel_config', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_trigger_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - with pytest.raises(NotImplementedError): - transport.close() + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_trigger(request=None) - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client + # Establish that the underlying stub method was called. 
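+    # update_trigger is LRO-backed, so the stub above was faked with a bare
+    # operations_pb2.Operation; the test only verifies the request proto and
+    # never polls the returned operation.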
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateTriggerRequest() - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + assert args[0] == request_msg -def test_eventarc_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.EventarcTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_trigger_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_trigger(request=None) -def test_eventarc_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.EventarcTransport() - adc.assert_called_once() + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteTriggerRequest() + assert args[0] == request_msg -def test_eventarc_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - EventarcClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_channel_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.EventarcGrpcTransport, - transports.EventarcGrpcAsyncIOTransport, - ], -) -def test_eventarc_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + call.return_value = channel.Channel() + client.get_channel(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelRequest() -@pytest.mark.parametrize( - "transport_class", - [ - transports.EventarcGrpcTransport, - transports.EventarcGrpcAsyncIOTransport, - transports.EventarcRestTransport, - ], -) -def test_eventarc_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + assert args[0] == request_msg -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.EventarcGrpcTransport, grpc_helpers), - (transports.EventarcGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_eventarc_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_channels_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - create_channel.assert_called_with( - "eventarc.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="eventarc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + call.return_value = eventarc.ListChannelsResponse() + client.list_channels(request=None) + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelsRequest() -@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) -def test_eventarc_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() + assert args[0] == request_msg - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_channel_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) -def test_eventarc_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.EventarcRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_channel(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelRequest() -def test_eventarc_rest_lro_client(): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_channel_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="grpc", ) - transport = client.transport - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_channel(request=None) - # Ensure that subsequent calls to the property send the exact same object. 
-    assert transport.operations_client is transport.operations_client
+    # Establish that the underlying stub method was called.
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = eventarc.UpdateChannelRequest()
+
+    assert args[0] == request_msg
+
+
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_eventarc_host_no_port(transport_name):
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_channel_empty_call_grpc():
     client = EventarcClient(
         credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'eventarc.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://eventarc.googleapis.com'
+        transport="grpc",
     )

-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_eventarc_host_with_port(transport_name):
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_channel),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.delete_channel(request=None)
+
+    # Establish that the underlying stub method was called.
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = eventarc.DeleteChannelRequest()
+
+    assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_provider_empty_call_grpc():
     client = EventarcClient(
         credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'eventarc.googleapis.com:8000'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://eventarc.googleapis.com:8000'
+        transport="grpc",
     )

-@pytest.mark.parametrize("transport_name", [
-    "rest",
-])
-def test_eventarc_client_transport_session_collision(transport_name):
-    creds1 = ga_credentials.AnonymousCredentials()
-    creds2 = ga_credentials.AnonymousCredentials()
-    client1 = EventarcClient(
-        credentials=creds1,
-        transport=transport_name,
-    )
-    client2 = EventarcClient(
-        credentials=creds2,
-        transport=transport_name,
-    )
-    session1 = client1.transport.get_trigger._session
-    session2 = client2.transport.get_trigger._session
-    assert session1 != session2
-    session1 = client1.transport.list_triggers._session
-    session2 = client2.transport.list_triggers._session
-    assert session1 != session2
-    session1 = client1.transport.create_trigger._session
-    session2 = client2.transport.create_trigger._session
-    assert session1 != session2
-    session1 = client1.transport.update_trigger._session
-    session2 = client2.transport.update_trigger._session
-    assert session1 != session2
-    session1 = client1.transport.delete_trigger._session
-    session2 = client2.transport.delete_trigger._session
-    assert session1 != session2
-    session1 = client1.transport.get_channel._session
-    session2 = client2.transport.get_channel._session
-    assert session1 != session2
-    session1 = client1.transport.list_channels._session
-    session2 = client2.transport.list_channels._session
-    assert session1 != session2
-    session1 = client1.transport.create_channel_._session
-    session2 = client2.transport.create_channel_._session
-    assert session1 != session2
-    session1 = client1.transport.update_channel._session
-    session2 = client2.transport.update_channel._session
-    assert session1 != session2
-    session1 = client1.transport.delete_channel._session
-    session2 = client2.transport.delete_channel._session
-    assert session1 != session2
-    session1 = client1.transport.get_provider._session
-    session2 = client2.transport.get_provider._session
-    assert session1 != session2
-    session1 = client1.transport.list_providers._session
-    session2 = client2.transport.list_providers._session
-    assert session1 != session2
-    session1 = client1.transport.get_channel_connection._session
-    session2 = client2.transport.get_channel_connection._session
-    assert session1 != session2
-    session1 = client1.transport.list_channel_connections._session
-    session2 = client2.transport.list_channel_connections._session
-    assert session1 != session2
-    session1 = client1.transport.create_channel_connection._session
-    session2 = client2.transport.create_channel_connection._session
-    assert session1 != session2
-    session1 = client1.transport.delete_channel_connection._session
-    session2 = client2.transport.delete_channel_connection._session
-    assert session1 != session2
-    session1 = client1.transport.get_google_channel_config._session
-    session2 = client2.transport.get_google_channel_config._session
-    assert session1 != session2
-    session1 = client1.transport.update_google_channel_config._session
-    session2 = client2.transport.update_google_channel_config._session
-    assert session1 != session2
-def test_eventarc_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_provider),
+            '__call__') as call:
+        call.return_value = discovery.Provider()
+        client.get_provider(request=None)

-    # Check that channel is used if provided.
-    transport = transports.EventarcGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials == None
+    # Establish that the underlying stub method was called.
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = eventarc.GetProviderRequest()
+
+    assert args[0] == request_msg

-def test_eventarc_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())

-    # Check that channel is used if provided.
-    transport = transports.EventarcGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_providers_empty_call_grpc():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
     )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials == None

+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_providers),
+            '__call__') as call:
+        call.return_value = eventarc.ListProvidersResponse()
+        client.list_providers(request=None)

-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) -def test_eventarc_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListProvidersRequest() - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel + assert args[0] == request_msg - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_channel_connection_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) -def test_eventarc_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + call.return_value = channel_connection.ChannelConnection() + client.get_channel_connection(request=None) - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelConnectionRequest() - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel + assert args[0] == request_msg -def test_eventarc_grpc_lro_client(): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_channel_connections_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) - transport = client.transport - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + call.return_value = eventarc.ListChannelConnectionsResponse() + client.list_channel_connections(request=None) - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelConnectionsRequest() + assert args[0] == request_msg -def test_eventarc_grpc_lro_async_client(): - client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_channel_connection_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_channel_connection(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelConnectionRequest() -def test_channel_path(): - project = "squid" - location = "clam" - channel = "whelk" - expected = "projects/{project}/locations/{location}/channels/{channel}".format(project=project, location=location, channel=channel, ) - actual = EventarcClient.channel_path(project, location, channel) - assert expected == actual + assert args[0] == request_msg -def test_parse_channel_path(): - expected = { - "project": "octopus", - "location": "oyster", - "channel": "nudibranch", - } - path = EventarcClient.channel_path(**expected) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_channel_connection_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Check that the path construction is reversible. - actual = EventarcClient.parse_channel_path(path) - assert expected == actual + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_channel_connection(request=None) -def test_channel_connection_path(): - project = "cuttlefish" - location = "mussel" - channel_connection = "winkle" - expected = "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format(project=project, location=location, channel_connection=channel_connection, ) - actual = EventarcClient.channel_connection_path(project, location, channel_connection) - assert expected == actual + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelConnectionRequest() + assert args[0] == request_msg -def test_parse_channel_connection_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "channel_connection": "abalone", - } - path = EventarcClient.channel_connection_path(**expected) - # Check that the path construction is reversible. - actual = EventarcClient.parse_channel_connection_path(path) - assert expected == actual +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_google_channel_config_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) -def test_cloud_function_path(): - project = "squid" - location = "clam" - function = "whelk" - expected = "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, ) - actual = EventarcClient.cloud_function_path(project, location, function) - assert expected == actual + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + call.return_value = google_channel_config.GoogleChannelConfig() + client.get_google_channel_config(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetGoogleChannelConfigRequest() -def test_parse_cloud_function_path(): - expected = { - "project": "octopus", - "location": "oyster", - "function": "nudibranch", - } - path = EventarcClient.cloud_function_path(**expected) + assert args[0] == request_msg - # Check that the path construction is reversible. - actual = EventarcClient.parse_cloud_function_path(path) - assert expected == actual -def test_crypto_key_path(): - project = "cuttlefish" - location = "mussel" - key_ring = "winkle" - crypto_key = "nautilus" - expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) - actual = EventarcClient.crypto_key_path(project, location, key_ring, crypto_key) - assert expected == actual +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_google_channel_config_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + call.return_value = gce_google_channel_config.GoogleChannelConfig() + client.update_google_channel_config(request=None) -def test_parse_crypto_key_path(): - expected = { - "project": "scallop", - "location": "abalone", - "key_ring": "squid", - "crypto_key": "clam", - } - path = EventarcClient.crypto_key_path(**expected) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateGoogleChannelConfigRequest() - # Check that the path construction is reversible. - actual = EventarcClient.parse_crypto_key_path(path) - assert expected == actual + assert args[0] == request_msg -def test_google_channel_config_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}/googleChannelConfig".format(project=project, location=location, ) - actual = EventarcClient.google_channel_config_path(project, location) - assert expected == actual +def test_transport_kind_grpc_asyncio(): + transport = EventarcAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" -def test_parse_google_channel_config_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = EventarcClient.google_channel_config_path(**expected) - # Check that the path construction is reversible. - actual = EventarcClient.parse_google_channel_config_path(path) - assert expected == actual +def test_initialize_client_w_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None -def test_provider_path(): - project = "cuttlefish" - location = "mussel" - provider = "winkle" - expected = "projects/{project}/locations/{location}/providers/{provider}".format(project=project, location=location, provider=provider, ) - actual = EventarcClient.provider_path(project, location, provider) - assert expected == actual +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_trigger_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) -def test_parse_provider_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "provider": "abalone", - } - path = EventarcClient.provider_path(**expected) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + channel='channel_value', + etag='etag_value', + )) + await client.get_trigger(request=None) - # Check that the path construction is reversible. - actual = EventarcClient.parse_provider_path(path) - assert expected == actual + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetTriggerRequest() -def test_service_path(): - expected = "*".format() - actual = EventarcClient.service_path() - assert expected == actual + assert args[0] == request_msg -def test_parse_service_path(): - expected = { - } - path = EventarcClient.service_path(**expected) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_triggers_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) - # Check that the path construction is reversible. - actual = EventarcClient.parse_service_path(path) - assert expected == actual + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_triggers(request=None) -def test_service_account_path(): - project = "squid" - service_account = "clam" - expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) - actual = EventarcClient.service_account_path(project, service_account) - assert expected == actual + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListTriggersRequest() + assert args[0] == request_msg -def test_parse_service_account_path(): - expected = { - "project": "whelk", - "service_account": "octopus", - } - path = EventarcClient.service_account_path(**expected) - # Check that the path construction is reversible. - actual = EventarcClient.parse_service_account_path(path) - assert expected == actual +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_trigger_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) -def test_trigger_path(): - project = "oyster" - location = "nudibranch" - trigger = "cuttlefish" - expected = "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) - actual = EventarcClient.trigger_path(project, location, trigger) - assert expected == actual + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_trigger(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateTriggerRequest() -def test_parse_trigger_path(): - expected = { - "project": "mussel", - "location": "winkle", - "trigger": "nautilus", - } - path = EventarcClient.trigger_path(**expected) + assert args[0] == request_msg - # Check that the path construction is reversible. 
- actual = EventarcClient.parse_trigger_path(path) - assert expected == actual -def test_workflow_path(): - project = "scallop" - location = "abalone" - workflow = "squid" - expected = "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) - actual = EventarcClient.workflow_path(project, location, workflow) - assert expected == actual +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_trigger_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_trigger(request=None) -def test_parse_workflow_path(): - expected = { - "project": "clam", - "location": "whelk", - "workflow": "octopus", - } - path = EventarcClient.workflow_path(**expected) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateTriggerRequest() - # Check that the path construction is reversible. - actual = EventarcClient.parse_workflow_path(path) - assert expected == actual + assert args[0] == request_msg -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = EventarcClient.common_billing_account_path(billing_account) - assert expected == actual +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_trigger_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = EventarcClient.common_billing_account_path(**expected) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_trigger(request=None) - # Check that the path construction is reversible. - actual = EventarcClient.parse_common_billing_account_path(path) - assert expected == actual + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteTriggerRequest() -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = EventarcClient.common_folder_path(folder) - assert expected == actual + assert args[0] == request_msg -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = EventarcClient.common_folder_path(**expected) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_channel_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) - # Check that the path construction is reversible. - actual = EventarcClient.parse_common_folder_path(path) - assert expected == actual + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + )) + await client.get_channel(request=None) -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = EventarcClient.common_organization_path(organization) - assert expected == actual + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelRequest() + assert args[0] == request_msg -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = EventarcClient.common_organization_path(**expected) - # Check that the path construction is reversible. - actual = EventarcClient.parse_common_organization_path(path) - assert expected == actual +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_channels_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = EventarcClient.common_project_path(project) - assert expected == actual + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_channels(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelsRequest() -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = EventarcClient.common_project_path(**expected) + assert args[0] == request_msg - # Check that the path construction is reversible. - actual = EventarcClient.parse_common_project_path(path) - assert expected == actual -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = EventarcClient.common_location_path(project, location) - assert expected == actual +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_channel_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_channel(request=None) -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = EventarcClient.common_location_path(**expected) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelRequest() - # Check that the path construction is reversible. - actual = EventarcClient.parse_common_location_path(path) - assert expected == actual + assert args[0] == request_msg -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_channel_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) - with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') ) - prep.assert_called_once_with(client_info) + await client.update_channel(request=None) - with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: - transport_class = EventarcClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateChannelRequest() + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_delete_channel_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_channel(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelRequest() -def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_provider_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider( + name='name_value', + display_name='display_name_value', + )) + await client.get_provider(request=None) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetProviderRequest() -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_providers_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_providers(request=None) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListProvidersRequest() - response = client.get_location(request) + assert args[0] == request_msg - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_channel_connection_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection( + name='name_value', + uid='uid_value', + channel='channel_value', + activation_token='activation_token_value', + )) + await client.get_channel_connection(request=None) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelConnectionRequest() -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_channel_connections_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + # Designate an appropriate return value for the call. 
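+        # FakeUnaryUnaryCall wraps the response so the mocked stub is
+        # awaitable: awaiting it yields the wrapped message, mimicking the
+        # call object a real async gRPC stub returns.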
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_channel_connections(request=None) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelConnectionsRequest() - response = client.list_locations(request) + assert args[0] == request_msg - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_channel_connection_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_channel_connection_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_google_channel_config_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + )) + await client.get_google_channel_config(request=None) + + # Establish that the underlying stub method was called. 
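Every `*_empty_call_grpc_asyncio` test above follows the same shape: patch the transport stub's `__call__`, invoke the client method with `request=None`, then confirm a default-constructed request message reached the stub. A minimal stdlib-only sketch of that shape (illustrative names; the real tests wrap responses in `grpc_helpers_async.FakeUnaryUnaryCall` rather than using a bare `AsyncMock`):

```python
# Minimal sketch of the empty-call failsafe pattern (illustrative names only).
import asyncio
from unittest import mock


class FakeRequest:
    """Stand-in for a default-constructed request message."""
    def __eq__(self, other):
        return isinstance(other, FakeRequest)


async def fake_client_method(stub, request=None):
    # Mirrors the behavior under test: a None request is coerced into a
    # default-constructed request message before hitting the stub.
    return await stub(request or FakeRequest())


async def main():
    stub = mock.AsyncMock(return_value="response")
    await fake_client_method(stub, request=None)

    # Same assertions as the generated tests: the stub was called, and its
    # first positional argument equals the default request message.
    stub.assert_called()
    _, args, _ = stub.mock_calls[0]
    assert args[0] == FakeRequest()


asyncio.run(main())
```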
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetGoogleChannelConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_google_channel_config_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + )) + await client.update_google_channel_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateGoogleChannelConfigRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = EventarcClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_get_trigger_rest_bad_request(request_type=eventarc.GetTriggerRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest" ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = Request() + response_value.request = mock.Mock() req.return_value = response_value - client.get_iam_policy(request) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_trigger(request) + @pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, + eventarc.GetTriggerRequest, + dict, ]) -def test_get_iam_policy_rest(request_type): +def test_get_trigger_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="rest" ) - request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} request = request_type(**request_init) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
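The REST `*_call_success` tests fake the wire response by serializing the expected proto with `json_format` and exposing it on a mock's `.content`, which is what the transport reads back. A self-contained sketch of that wrapping, using a well-known proto type so it runs without the generated package:

```python
# Sketch: how the REST tests fake a successful HTTP response. The expected
# proto is JSON-serialized and exposed via the mock's .content attribute.
from unittest import mock

from google.longrunning import operations_pb2
from google.protobuf import json_format

return_value = operations_pb2.Operation(name='operations/spam')

response_value = mock.Mock()
response_value.status_code = 200
response_value.content = json_format.MessageToJson(return_value).encode('UTF-8')
response_value.headers = {"header-1": "value-1"}

# Round-trip check: the faked payload parses back into the same message.
parsed = json_format.Parse(response_value.content.decode('UTF-8'),
                           operations_pb2.Operation())
assert parsed.name == 'operations/spam'
```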
- return_value = policy_pb2.Policy() + return_value = trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + channel='channel_value', + etag='etag_value', + ) # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + # Convert return value to protobuf type + return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value - - response = client.get_iam_policy(request) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_trigger(request) # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) + assert isinstance(response, trigger.Trigger) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.service_account == 'service_account_value' + assert response.channel == 'channel_value' + assert response.etag == 'etag_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) -def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = trigger.Trigger.to_json(trigger.Trigger()) + req.return_value.content = return_value + + request = eventarc.GetTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = trigger.Trigger() + post_with_metadata.return_value = trigger.Trigger(), metadata + + client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_triggers_rest_bad_request(request_type=eventarc.ListTriggersRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest" ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + # send a request that will satisfy transcoding + request_init = 
{'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = Request() + response_value.request = mock.Mock() req.return_value = response_value - client.set_iam_policy(request) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_triggers(request) + @pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, + eventarc.ListTriggersRequest, + dict, ]) -def test_set_iam_policy_rest(request_type): +def test_list_triggers_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="rest" ) - request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} request = request_type(**request_init) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = eventarc.ListTriggersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + # Convert return value to protobuf type + return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value - - response = client.set_iam_policy(request) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_triggers(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) + assert isinstance(response, pagers.ListTriggersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_triggers_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse()) + req.return_value.content = return_value + + request = eventarc.ListTriggersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListTriggersResponse() + post_with_metadata.return_value = eventarc.ListTriggersResponse(), metadata + + client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + -def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): +def test_create_trigger_rest_bad_request(request_type=eventarc.CreateTriggerRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest" ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
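Each `*_rest_interceptors` test asserts the same contract: `pre_*` may rewrite `(request, metadata)`, `post_*` may rewrite the response, and `post_*_with_metadata` may rewrite `(response, metadata)`, with each hook firing exactly once per call. A generic sketch of that dispatch order (not the generated transport's actual wiring):

```python
# Minimal sketch of the hook order the interceptor tests assert
# (generic names; the generated REST transport wires these internally).
class Interceptor:
    def pre(self, request, metadata):
        return request, metadata            # may rewrite both

    def post(self, response):
        return response                     # may rewrite the response

    def post_with_metadata(self, response, metadata):
        return response, metadata           # may rewrite response + metadata


def invoke(request, metadata, send, interceptor=None):
    if interceptor is not None:
        request, metadata = interceptor.pre(request, metadata)
    response = send(request, metadata)
    if interceptor is not None:
        response = interceptor.post(response)
        response, _ = interceptor.post_with_metadata(response, metadata)
    return response


assert invoke("req", [("key", "val")], send=lambda r, m: "resp",
              interceptor=Interceptor()) == "resp"
```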
with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = Request() + response_value.request = mock.Mock() req.return_value = response_value - client.test_iam_permissions(request) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_trigger(request) + @pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, + eventarc.CreateTriggerRequest, + dict, ]) -def test_test_iam_permissions_rest(request_type): +def test_create_trigger_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="rest" ) - request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.CreateTriggerRequest.meta.fields["trigger"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["trigger"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["trigger"][field])): + del request_init["trigger"][field][i][subfield] + else: + del request_init["trigger"][field][subfield] request = request_type(**request_init) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() + return_value = operations_pb2.Operation(name='operations/spam') # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value - - response = client.test_iam_permissions(request) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_trigger(request) # Establish that the response is the type that we expect. 
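The pruning block above guards against the runtime proto definition lagging the one used at generation time (issue #1748): it collects `(field, subfield)` pairs missing at runtime, then deletes them from the sample request, with special handling for repeated fields. A pure-Python toy with invented field and subfield names showing the same two phases:

```python
# Toy version of the pruning above, with invented field/subfield names.
# Phase 1 collects (field, subfield) pairs missing at runtime; phase 2
# deletes them, handling repeated (list) fields like the generated code.
runtime_nested_fields = {("destination", "cloud_run"),
                         ("event_filters", "attribute")}
request_init = {
    "destination": {"cloud_run": "svc", "legacy_target": "gone"},
    "event_filters": [{"attribute": "a", "retired_flag": True}],
}

subfields_not_in_runtime = []
for field, value in request_init.items():
    result, is_repeated = ((value[0], True)
                           if isinstance(value, list) and value
                           else (value, False))
    if isinstance(result, dict):
        for subfield in result:
            if (field, subfield) not in runtime_nested_fields:
                subfields_not_in_runtime.append((field, subfield, is_repeated))

for field, subfield, is_repeated in subfields_not_in_runtime:
    entries = request_init[field] if is_repeated else [request_init[field]]
    for entry in entries:
        del entry[subfield]

assert request_init == {
    "destination": {"cloud_run": "svc"},
    "event_filters": [{"attribute": "a"}],
}
```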
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + json_return_value = json_format.MessageToJson(return_value) -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = EventarcClient( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.CreateTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_trigger_rest_bad_request(request_type=eventarc.UpdateTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
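A mocked `status_code = 400` response surfaces as `core_exceptions.BadRequest` because non-2xx responses are mapped through google-api-core's HTTP status handling. The sketch below assumes the `from_http_response` helper does that mapping, which also explains why the tests stub `response.json` (error details are parsed from the JSON body):

```python
# Sketch: why a mocked 400 raises BadRequest (assuming google-api-core's
# from_http_response; the generated transport's internals may differ).
from unittest import mock

from google.api_core import exceptions as core_exceptions

response_value = mock.Mock()
response_value.status_code = 400
response_value.headers = {}
response_value.json = mock.Mock(return_value={})  # parsed for error details
response_value.request = mock.Mock()

exc = core_exceptions.from_http_response(response_value)
assert isinstance(exc, core_exceptions.BadRequest)
```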
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = Request() + response_value.request = mock.Mock() req.return_value = response_value - client.cancel_operation(request) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_trigger(request) + @pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, + eventarc.UpdateTriggerRequest, + dict, ]) -def test_cancel_operation_rest(request_type): +def test_update_trigger_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="rest" ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + + # send a request that will satisfy transcoding + request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.UpdateTriggerRequest.meta.fields["trigger"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["trigger"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["trigger"][field])): + del request_init["trigger"][field][i][subfield] + else: + del request_init["trigger"][field][subfield] request = request_type(**request_init) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_trigger(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.UpdateTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_trigger_rest_bad_request(request_type=eventarc.DeleteTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_trigger(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteTriggerRequest, + dict, +]) +def test_delete_trigger_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
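The LRO-returning REST tests designate a bare `operations_pb2.Operation` as the payload and, in the interceptor variants, patch `operation.Operation._set_result_from_operation` so no result resolution happens against the fake payload. A small sketch of that patch, using the same names the tests use:

```python
# Sketch: patching the LRO result hook so a bare Operation proto can be
# returned without triggering result polling (as the tests above do).
from unittest import mock

from google.api_core import operation
from google.longrunning import operations_pb2

with mock.patch.object(operation.Operation, "_set_result_from_operation"):
    op = operations_pb2.Operation(name='operations/spam')
    assert not op.done  # still in flight; no result gets resolved
```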
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_trigger(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.DeleteTriggerRequest.pb(eventarc.DeleteTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.DeleteTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_channel_rest_bad_request(request_type=eventarc.GetChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_channel(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelRequest, + dict, +]) +def test_get_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + pubsub_topic='pubsub_topic_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_channel(request) + + # Establish that the response is the type that we expect. 
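The `# Convert return value to protobuf type` step exists because these response types are proto-plus wrappers, while `json_format` only accepts raw protobuf messages; `<MessageType>.pb(...)` unwraps them. A standalone sketch with a throwaway message type (assumes the proto-plus package is installed):

```python
# Sketch: why <MessageType>.pb(...) precedes MessageToJson: proto-plus
# wrappers must be unwrapped to the raw protobuf message for json_format.
import proto
from google.protobuf import json_format


class Thing(proto.Message):
    name = proto.Field(proto.STRING, number=1)


wrapped = Thing(name='name_value')
raw = Thing.pb(wrapped)                  # underlying protobuf message
assert json_format.MessageToJson(raw)    # json_format accepts only raw pb
```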
+ assert isinstance(response, channel.Channel) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.provider == 'provider_value' + assert response.state == channel.Channel.State.PENDING + assert response.activation_token == 'activation_token_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.GetChannelRequest.pb(eventarc.GetChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = channel.Channel.to_json(channel.Channel()) + req.return_value.content = return_value + + request = eventarc.GetChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = channel.Channel() + post_with_metadata.return_value = channel.Channel(), metadata + + client.get_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_channels_rest_bad_request(request_type=eventarc.ListChannelsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_channels(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListChannelsRequest, + dict, +]) +def test_list_channels_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListChannelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_channels(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChannelsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_channels_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channels") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.ListChannelsRequest.pb(eventarc.ListChannelsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = eventarc.ListChannelsResponse.to_json(eventarc.ListChannelsResponse()) + req.return_value.content = return_value + + request = eventarc.ListChannelsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListChannelsResponse() + post_with_metadata.return_value = eventarc.ListChannelsResponse(), metadata + + client.list_channels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_channel_rest_bad_request(request_type=eventarc.CreateChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
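The list-method success tests read `next_page_token` and `unreachable` straight off the returned pager; generated pagers proxy unknown attribute lookups to the wrapped response. A tiny sketch mirroring (not reproducing) that delegation:

```python
# Tiny sketch of the attribute delegation that lets the tests read
# response.next_page_token off a pager (mirrors the generated pagers module).
class FakeResponse:
    next_page_token = 'next_page_token_value'
    unreachable = ['unreachable_value']


class FakePager:
    def __init__(self, response):
        self._response = response

    def __getattr__(self, name):
        # Anything the pager itself doesn't define is read from the response.
        return getattr(self._response, name)


pager = FakePager(FakeResponse())
assert pager.next_page_token == 'next_page_token_value'
assert pager.unreachable == ['unreachable_value']
```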
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_channel(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateChannelRequest, + dict, +]) +def test_create_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.CreateChannelRequest.meta.fields["channel"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["channel"][field])): + del request_init["channel"][field][i][subfield] + else: + del request_init["channel"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_channel(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.CreateChannelRequest.pb(eventarc.CreateChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.CreateChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_channel_rest_bad_request(request_type=eventarc.UpdateChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_channel(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateChannelRequest, + dict, +]) +def test_update_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.UpdateChannelRequest.meta.fields["channel"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["channel"][field])): + del request_init["channel"][field][i][subfield] + else: + del request_init["channel"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_channel(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.UpdateChannelRequest.pb(eventarc.UpdateChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.UpdateChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_channel_rest_bad_request(request_type=eventarc.DeleteChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_channel(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteChannelRequest, + dict, +]) +def test_delete_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_channel(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.DeleteChannelRequest.pb(eventarc.DeleteChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.DeleteChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_provider_rest_bad_request(request_type=eventarc.GetProviderRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_provider(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetProviderRequest, + dict, +]) +def test_get_provider_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = discovery.Provider( + name='name_value', + display_name='display_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_provider(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, discovery.Provider) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_provider_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_provider") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.GetProviderRequest.pb(eventarc.GetProviderRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = discovery.Provider.to_json(discovery.Provider()) + req.return_value.content = return_value + + request = eventarc.GetProviderRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = discovery.Provider() + post_with_metadata.return_value = discovery.Provider(), metadata + + client.get_provider(request, metadata=[("key", "val"), 
("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_providers_rest_bad_request(request_type=eventarc.ListProvidersRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_providers(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListProvidersRequest, + dict, +]) +def test_list_providers_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListProvidersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_providers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListProvidersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_providers_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_providers") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.ListProvidersRequest.pb(eventarc.ListProvidersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = eventarc.ListProvidersResponse.to_json(eventarc.ListProvidersResponse()) + req.return_value.content = return_value + + request = eventarc.ListProvidersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListProvidersResponse() + post_with_metadata.return_value = eventarc.ListProvidersResponse(), metadata + + client.list_providers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_channel_connection_rest_bad_request(request_type=eventarc.GetChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_channel_connection(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelConnectionRequest, + dict, +]) +def test_get_channel_connection_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = channel_connection.ChannelConnection( + name='name_value', + uid='uid_value', + channel='channel_value', + activation_token='activation_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_channel_connection(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, channel_connection.ChannelConnection) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.channel == 'channel_value' + assert response.activation_token == 'activation_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.GetChannelConnectionRequest.pb(eventarc.GetChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = channel_connection.ChannelConnection.to_json(channel_connection.ChannelConnection()) + req.return_value.content = return_value + + request = eventarc.GetChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = channel_connection.ChannelConnection() + post_with_metadata.return_value = channel_connection.ChannelConnection(), metadata + + client.get_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_channel_connections_rest_bad_request(request_type=eventarc.ListChannelConnectionsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_channel_connections(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListChannelConnectionsRequest, + dict, +]) +def test_list_channel_connections_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_channel_connections(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListChannelConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_channel_connections_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channel_connections") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.ListChannelConnectionsRequest.pb(eventarc.ListChannelConnectionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = eventarc.ListChannelConnectionsResponse.to_json(eventarc.ListChannelConnectionsResponse()) + req.return_value.content = return_value + + request = eventarc.ListChannelConnectionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListChannelConnectionsResponse() + post_with_metadata.return_value = eventarc.ListChannelConnectionsResponse(), metadata + + client.list_channel_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_channel_connection_rest_bad_request(request_type=eventarc.CreateChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
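+ # The 'parent' path set above exists so the request transcodes onto the REST
+ # URL template (roughly POST /v1/{parent=projects/*/locations/*}/channelConnections;
+ # illustrative, the exact binding lives in the transport); without it no
+ # binding would match and the request could not be sent.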
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_channel_connection(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateChannelConnectionRequest, + dict, +]) +def test_create_channel_connection_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'activation_token': 'activation_token_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel_connection"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if 
subfield: + if field_repeated: + for i in range(0, len(request_init["channel_connection"][field])): + del request_init["channel_connection"][field][i][subfield] + else: + del request_init["channel_connection"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_channel_connection(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.CreateChannelConnectionRequest.pb(eventarc.CreateChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.CreateChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_channel_connection_rest_bad_request(request_type=eventarc.DeleteChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
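+ # DeleteChannelConnection is also a long-running method; the call-success
+ # test below fakes its response with an operations_pb2.Operation.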
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_channel_connection(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteChannelConnectionRequest, + dict, +]) +def test_delete_channel_connection_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_channel_connection(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.DeleteChannelConnectionRequest.pb(eventarc.DeleteChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.DeleteChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + 
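+ # Invoking the RPC below should fire each patched hook exactly once. A real
+ # interceptor would subclass the transport's interceptor class, e.g. (purely
+ # illustrative sketch; MyInterceptor is hypothetical):
+ #
+ #     class MyInterceptor(transports.EventarcRestInterceptor):
+ #         def pre_delete_channel_connection(self, request, metadata):
+ #             # inspect or rewrite the request/metadata here
+ #             return request, metadata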
client.delete_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_google_channel_config_rest_bad_request(request_type=eventarc.GetGoogleChannelConfigRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_google_channel_config(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetGoogleChannelConfigRequest, + dict, +]) +def test_get_google_channel_config_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_google_channel_config(request) + + # Establish that the response is the type that we expect. 
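+ # The assertions below check the proxied proto-plus fields against the
+ # values seeded into the mocked response above.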
+ assert isinstance(response, google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_google_channel_config_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_google_channel_config") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.GetGoogleChannelConfigRequest.pb(eventarc.GetGoogleChannelConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = google_channel_config.GoogleChannelConfig.to_json(google_channel_config.GoogleChannelConfig()) + req.return_value.content = return_value + + request = eventarc.GetGoogleChannelConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = google_channel_config.GoogleChannelConfig() + post_with_metadata.return_value = google_channel_config.GoogleChannelConfig(), metadata + + client.get_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_google_channel_config_rest_bad_request(request_type=eventarc.UpdateGoogleChannelConfigRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_google_channel_config(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateGoogleChannelConfigRequest, + dict, +]) +def test_update_google_channel_config_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["google_channel_config"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["google_channel_config"][field])): + del request_init["google_channel_config"][field][i][subfield] + else: + del request_init["google_channel_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gce_google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_google_channel_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_google_channel_config_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_google_channel_config") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.UpdateGoogleChannelConfigRequest.pb(eventarc.UpdateGoogleChannelConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gce_google_channel_config.GoogleChannelConfig.to_json(gce_google_channel_config.GoogleChannelConfig()) + req.return_value.content = return_value + + request = eventarc.UpdateGoogleChannelConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gce_google_channel_config.GoogleChannelConfig() + post_with_metadata.return_value = gce_google_channel_config.GoogleChannelConfig(), metadata + + client.update_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = 
EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
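+ # Mixin RPCs (locations, IAM, operations) use raw *_pb2 messages parsed via
+ # json_format.ParseDict, so these tests assert only the response type.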
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
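+ # get_iam_policy, set_iam_policy, and test_iam_permissions all target the
+ # same trigger 'resource' path; only the request and response message types
+ # differ between the three tests.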
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
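+ # Like cancel_operation above, delete_operation returns google.protobuf.Empty
+ # on the wire, so the success test fakes a '{}' body and expects None.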
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_trigger_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + client.get_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_triggers_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + client.list_triggers(request=None) + + # Establish that the underlying stub method was called. 
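+ # With request=None, the client should have synthesized a default ListTriggersRequest.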
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListTriggersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_trigger_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + client.create_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_trigger_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + client.update_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_trigger_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + client.delete_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_channel_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + client.get_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_channels_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + client.list_channels(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_create_channel_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + client.create_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_channel_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + client.update_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_channel_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + client.delete_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_provider_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + client.get_provider(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetProviderRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_providers_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + client.list_providers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListProvidersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_channel_connection_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + client.get_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_channel_connections_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + client.list_channel_connections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_channel_connection_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + client.create_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_channel_connection_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + client.delete_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_google_channel_config_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + client.get_google_channel_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetGoogleChannelConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_google_channel_config_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + client.update_google_channel_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateGoogleChannelConfigRequest() + + assert args[0] == request_msg + + +def test_eventarc_rest_lro_client(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.EventarcGrpcTransport, + ) + +def test_eventarc_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.EventarcTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_eventarc_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.EventarcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
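+ # (Concrete transports are expected to override each of the stubs listed below.)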
+ methods = ( + 'get_trigger', + 'list_triggers', + 'create_trigger', + 'update_trigger', + 'delete_trigger', + 'get_channel', + 'list_channels', + 'create_channel_', + 'update_channel', + 'delete_channel', + 'get_provider', + 'list_providers', + 'get_channel_connection', + 'list_channel_connections', + 'create_channel_connection', + 'delete_channel_connection', + 'get_google_channel_config', + 'update_google_channel_config', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_eventarc_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EventarcTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_eventarc_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EventarcTransport() + adc.assert_called_once() + + +def test_eventarc_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + EventarcClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EventarcGrpcTransport, + transports.EventarcGrpcAsyncIOTransport, + ], +) +def test_eventarc_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
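+ # google.auth.default is patched so the test never performs a real ADC lookup.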
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EventarcGrpcTransport, + transports.EventarcGrpcAsyncIOTransport, + transports.EventarcRestTransport, + ], +) +def test_eventarc_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.EventarcGrpcTransport, grpc_helpers), + (transports.EventarcGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_eventarc_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "eventarc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="eventarc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) +def test_eventarc_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
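+ # The callback's cert/key pair should be fed to grpc.ssl_channel_credentials.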
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_eventarc_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.EventarcRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_eventarc_host_no_port(transport_name): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'eventarc.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://eventarc.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_eventarc_host_with_port(transport_name): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'eventarc.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://eventarc.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_eventarc_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = EventarcClient( + credentials=creds1, + transport=transport_name, + ) + client2 = EventarcClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_trigger._session + session2 = client2.transport.get_trigger._session + assert session1 != session2 + session1 = client1.transport.list_triggers._session + session2 = client2.transport.list_triggers._session + assert session1 != session2 + session1 = client1.transport.create_trigger._session + session2 = client2.transport.create_trigger._session + assert session1 != session2 + session1 = client1.transport.update_trigger._session + session2 = client2.transport.update_trigger._session + assert session1 != session2 + session1 = client1.transport.delete_trigger._session + session2 = client2.transport.delete_trigger._session + assert session1 != session2 + session1 = client1.transport.get_channel._session + session2 = client2.transport.get_channel._session + assert session1 != session2 + session1 = client1.transport.list_channels._session + session2 = client2.transport.list_channels._session + assert session1 != session2 + session1 = client1.transport.create_channel_._session + session2 = client2.transport.create_channel_._session + assert session1 != session2 + session1 = client1.transport.update_channel._session + session2 = client2.transport.update_channel._session + assert session1 != session2 + session1 = 
client1.transport.delete_channel._session + session2 = client2.transport.delete_channel._session + assert session1 != session2 + session1 = client1.transport.get_provider._session + session2 = client2.transport.get_provider._session + assert session1 != session2 + session1 = client1.transport.list_providers._session + session2 = client2.transport.list_providers._session + assert session1 != session2 + session1 = client1.transport.get_channel_connection._session + session2 = client2.transport.get_channel_connection._session + assert session1 != session2 + session1 = client1.transport.list_channel_connections._session + session2 = client2.transport.list_channel_connections._session + assert session1 != session2 + session1 = client1.transport.create_channel_connection._session + session2 = client2.transport.create_channel_connection._session + assert session1 != session2 + session1 = client1.transport.delete_channel_connection._session + session2 = client2.transport.delete_channel_connection._session + assert session1 != session2 + session1 = client1.transport.get_google_channel_config._session + session2 = client2.transport.get_google_channel_config._session + assert session1 != session2 + session1 = client1.transport.update_google_channel_config._session + session2 = client2.transport.update_google_channel_config._session + assert session1 != session2 +def test_eventarc_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.EventarcGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +def test_eventarc_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.EventarcGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor.
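+# (client_cert_source_for_mtls and ssl_channel_credentials are the supported replacements.)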
+@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) +def test_eventarc_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) +def test_eventarc_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_eventarc_grpc_lro_client(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_eventarc_grpc_lro_async_client(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have an api-core operations client.
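+ # (The asyncio transport exposes OperationsAsyncClient rather than OperationsClient.)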
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_channel_path(): + project = "squid" + location = "clam" + channel = "whelk" + expected = "projects/{project}/locations/{location}/channels/{channel}".format(project=project, location=location, channel=channel, ) + actual = EventarcClient.channel_path(project, location, channel) + assert expected == actual + + +def test_parse_channel_path(): + expected = { + "project": "octopus", + "location": "oyster", + "channel": "nudibranch", + } + path = EventarcClient.channel_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_channel_path(path) + assert expected == actual + +def test_channel_connection_path(): + project = "cuttlefish" + location = "mussel" + channel_connection = "winkle" + expected = "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format(project=project, location=location, channel_connection=channel_connection, ) + actual = EventarcClient.channel_connection_path(project, location, channel_connection) + assert expected == actual + + +def test_parse_channel_connection_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "channel_connection": "abalone", + } + path = EventarcClient.channel_connection_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_channel_connection_path(path) + assert expected == actual + +def test_cloud_function_path(): + project = "squid" + location = "clam" + function = "whelk" + expected = "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, ) + actual = EventarcClient.cloud_function_path(project, location, function) + assert expected == actual + + +def test_parse_cloud_function_path(): + expected = { + "project": "octopus", + "location": "oyster", + "function": "nudibranch", + } + path = EventarcClient.cloud_function_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_cloud_function_path(path) + assert expected == actual + +def test_crypto_key_path(): + project = "cuttlefish" + location = "mussel" + key_ring = "winkle" + crypto_key = "nautilus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) + actual = EventarcClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "scallop", + "location": "abalone", + "key_ring": "squid", + "crypto_key": "clam", + } + path = EventarcClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
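+ # parse_crypto_key_path should recover the original template arguments.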
+ actual = EventarcClient.parse_crypto_key_path(path) + assert expected == actual + +def test_google_channel_config_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}/googleChannelConfig".format(project=project, location=location, ) + actual = EventarcClient.google_channel_config_path(project, location) + assert expected == actual + + +def test_parse_google_channel_config_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = EventarcClient.google_channel_config_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_google_channel_config_path(path) + assert expected == actual + +def test_provider_path(): + project = "cuttlefish" + location = "mussel" + provider = "winkle" + expected = "projects/{project}/locations/{location}/providers/{provider}".format(project=project, location=location, provider=provider, ) + actual = EventarcClient.provider_path(project, location, provider) + assert expected == actual + + +def test_parse_provider_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "provider": "abalone", + } + path = EventarcClient.provider_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_provider_path(path) + assert expected == actual + +def test_service_path(): + expected = "*".format() + actual = EventarcClient.service_path() + assert expected == actual + + +def test_parse_service_path(): + expected = { + } + path = EventarcClient.service_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_service_path(path) + assert expected == actual + +def test_service_account_path(): + project = "squid" + service_account = "clam" + expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + actual = EventarcClient.service_account_path(project, service_account) + assert expected == actual + + +def test_parse_service_account_path(): + expected = { + "project": "whelk", + "service_account": "octopus", + } + path = EventarcClient.service_account_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_service_account_path(path) + assert expected == actual + +def test_trigger_path(): + project = "oyster" + location = "nudibranch" + trigger = "cuttlefish" + expected = "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) + actual = EventarcClient.trigger_path(project, location, trigger) + assert expected == actual + + +def test_parse_trigger_path(): + expected = { + "project": "mussel", + "location": "winkle", + "trigger": "nautilus", + } + path = EventarcClient.trigger_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' + # Check that the path construction is reversible. 
+ actual = EventarcClient.parse_trigger_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_workflow_path(): + project = "scallop" + location = "abalone" + workflow = "squid" + expected = "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) + actual = EventarcClient.workflow_path(project, location, workflow) + assert expected == actual - response = client.cancel_operation(request) - # Establish that the response is the type that we expect. - assert response is None +def test_parse_workflow_path(): + expected = { + "project": "clam", + "location": "whelk", + "workflow": "octopus", + } + path = EventarcClient.workflow_path(**expected) -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. + actual = EventarcClient.parse_workflow_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = EventarcClient.common_billing_account_path(billing_account) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = EventarcClient.common_billing_account_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_billing_account_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = EventarcClient.common_folder_path(folder) + assert expected == actual - response = client.delete_operation(request) - # Establish that the response is the type that we expect. 
- assert response is None +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = EventarcClient.common_folder_path(**expected) -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_folder_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = EventarcClient.common_organization_path(organization) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = EventarcClient.common_organization_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_organization_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = EventarcClient.common_project_path(project) + assert expected == actual - response = client.get_operation(request) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = EventarcClient.common_project_path(**expected) -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. 
+ actual = EventarcClient.parse_common_project_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = EventarcClient.common_location_path(project, location) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = EventarcClient.common_location_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_location_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_operations(request) +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) + with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: + transport_class = EventarcClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) def test_delete_operation(transport: str = "grpc"): @@ -16402,21 +17105,40 @@ async def test_test_iam_permissions_from_dict_async(): ) call.assert_called() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - for transport, close_name in transports.items(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() +def test_transport_close_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + def test_client_ctx(): transports = [ diff --git a/tests/integration/goldens/logging/docs/index.rst b/tests/integration/goldens/logging/docs/index.rst index 6a4859643f..51acc96292 100755 --- a/tests/integration/goldens/logging/docs/index.rst +++ b/tests/integration/goldens/logging/docs/index.rst @@ -3,5 +3,5 @@ API Reference .. toctree:: :maxdepth: 2 - logging_v2/services - logging_v2/types + logging_v2/services_ + logging_v2/types_ diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index a02aedca9f..615219a621 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -44,6 +45,13 @@ from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport from .client import ConfigServiceV2Client +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class ConfigServiceV2AsyncClient: """Service for configuring sinks used to route log entries.""" @@ -242,13 +250,27 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.ConfigServiceV2AsyncClient`.", + extra = { + "serviceName": "google.logging.v2.ConfigServiceV2", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.logging.v2.ConfigServiceV2", + "credentialsType": None, + } + ) + async def list_buckets(self, request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListBucketsAsyncPager: r"""Lists log buckets. @@ -303,8 +325,10 @@ async def sample_list_buckets(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager: @@ -375,7 +399,7 @@ async def get_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Gets a log bucket. @@ -411,8 +435,10 @@ async def sample_get_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.types.LogBucket: @@ -457,7 +483,7 @@ async def create_bucket_async(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Creates a log bucket asynchronously that can be used to store log entries. @@ -501,8 +527,10 @@ async def sample_create_bucket_async(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -558,7 +586,7 @@ async def update_bucket_async(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Updates a log bucket asynchronously. @@ -604,8 +632,10 @@ async def sample_update_bucket_async(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -661,7 +691,7 @@ async def create_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's @@ -700,8 +730,10 @@ async def sample_create_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -746,7 +778,7 @@ async def update_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Updates a log bucket. 
@@ -788,8 +820,10 @@ async def sample_update_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -834,7 +868,7 @@ async def delete_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a log bucket. @@ -872,8 +906,10 @@ async def sample_delete_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -909,7 +945,7 @@ async def undelete_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 @@ -944,8 +980,10 @@ async def sample_undelete_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -982,7 +1020,7 @@ async def list_views(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListViewsAsyncPager: r"""Lists views on a log bucket. @@ -1029,8 +1067,10 @@ async def sample_list_views(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.

         Returns:
             google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager:
@@ -1101,7 +1141,7 @@ async def get_view(self,
             *,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> logging_config.LogView:
         r"""Gets a view on a log bucket.
@@ -1137,8 +1177,10 @@ async def sample_get_view():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.

         Returns:
             google.cloud.logging_v2.types.LogView:
@@ -1183,7 +1225,7 @@ async def create_view(self,
             *,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> logging_config.LogView:
         r"""Creates a view over log entries in a log bucket. A bucket
         may contain a maximum of 30 views.
@@ -1221,8 +1263,10 @@ async def sample_create_view():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.

         Returns:
             google.cloud.logging_v2.types.LogView:
@@ -1267,7 +1311,7 @@ async def update_view(self,
             *,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> logging_config.LogView:
         r"""Updates a view on a log bucket. This method replaces the
         following fields in the existing view with values from the new
@@ -1307,8 +1351,10 @@ async def sample_update_view():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.

         Returns:
             google.cloud.logging_v2.types.LogView:
@@ -1353,7 +1399,7 @@ async def delete_view(self,
             *,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> None:
         r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error
         is returned, this indicates that the system is not in a state where it
@@ -1389,8 +1435,10 @@ async def sample_delete_view():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
         """
         # Create or coerce a protobuf request object.
         # - Use the request object if provided (there's no risk of modifying the input as
@@ -1427,7 +1475,7 @@ async def list_sinks(self,
             parent: Optional[str] = None,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> pagers.ListSinksAsyncPager:
         r"""Lists sinks.
@@ -1478,8 +1526,10 @@ async def sample_list_sinks():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.

         Returns:
             google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager:
@@ -1550,7 +1600,7 @@ async def get_sink(self,
             sink_name: Optional[str] = None,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> logging_config.LogSink:
         r"""Gets a sink.
@@ -1603,8 +1653,10 @@ async def sample_get_sink():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
Returns: google.cloud.logging_v2.types.LogSink: @@ -1670,7 +1722,7 @@ async def create_sink(self, sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins @@ -1739,8 +1791,10 @@ async def sample_create_sink(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -1809,7 +1863,7 @@ async def update_sink(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, @@ -1902,8 +1956,10 @@ async def sample_update_sink(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -1972,7 +2028,7 @@ async def delete_sink(self, sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. @@ -2024,8 +2080,10 @@ async def sample_delete_sink(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have @@ -2076,7 +2134,7 @@ async def create_link(self, link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs @@ -2147,8 +2205,10 @@ async def sample_create_link(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -2221,7 +2281,7 @@ async def delete_link(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2273,8 +2333,10 @@ async def sample_delete_link(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -2350,7 +2412,7 @@ async def list_links(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLinksAsyncPager: r"""Lists links. @@ -2399,8 +2461,10 @@ async def sample_list_links(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager: @@ -2472,7 +2536,7 @@ async def get_link(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Link: r"""Gets a link. 
@@ -2519,8 +2583,10 @@ async def sample_get_link(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Link: @@ -2578,7 +2644,7 @@ async def list_exclusions(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -2630,8 +2696,10 @@ async def sample_list_exclusions(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: @@ -2702,7 +2770,7 @@ async def get_exclusion(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. @@ -2755,8 +2823,10 @@ async def sample_get_exclusion(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -2820,7 +2890,7 @@ async def create_exclusion(self, exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can @@ -2890,8 +2960,10 @@ async def sample_create_exclusion(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -2958,7 +3030,7 @@ async def update_exclusion(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3039,8 +3111,10 @@ async def sample_update_exclusion(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -3107,7 +3181,7 @@ async def delete_exclusion(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an exclusion in the \_Default sink. @@ -3158,8 +3232,10 @@ async def sample_delete_exclusion(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -3207,7 +3283,7 @@ async def get_cmek_settings(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. @@ -3257,8 +3333,10 @@ async def sample_get_cmek_settings(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.types.CmekSettings: @@ -3313,7 +3391,7 @@ async def update_cmek_settings(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. @@ -3368,8 +3446,10 @@ async def sample_update_cmek_settings(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -3425,7 +3505,7 @@ async def get_settings(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. @@ -3500,8 +3580,10 @@ async def sample_get_settings(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -3561,7 +3643,7 @@ async def update_settings(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. @@ -3643,8 +3725,10 @@ async def sample_update_settings(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -3704,7 +3788,7 @@ async def copy_log_entries(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. 
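`copy_log_entries` above, like the `*_bucket_async` and link methods, returns a long-running operation. A brief hedged sketch of driving one to completion with the async client this file generates; the resource names are placeholders:

    import asyncio
    from google.cloud import logging_v2

    async def main():
        client = logging_v2.ConfigServiceV2AsyncClient()
        # The RPC returns an AsyncOperation; awaiting result() blocks
        # until the LRO finishes and yields a CopyLogEntriesResponse.
        op = await client.copy_log_entries(
            request={
                "name": "projects/my-project/locations/global/buckets/my-bucket",
                "destination": "storage.googleapis.com/my-gcs-bucket",
            }
        )
        response = await op.result()
        print(response.log_entries_copied_count)

    asyncio.run(main())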
@@ -3746,8 +3830,10 @@ async def sample_copy_log_entries(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -3797,7 +3883,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -3808,8 +3894,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -3822,11 +3910,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -3851,7 +3935,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -3862,8 +3946,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -3876,11 +3962,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -3905,7 +3987,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -3920,8 +4002,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -3933,11 +4017,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index aea2902983..4bbd38b20d 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -16,6 +16,7 @@ from collections import OrderedDict from http import HTTPStatus import json +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast @@ -38,6 +39,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers @@ -458,33 +467,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. 
- - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -494,9 +476,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - ConfigServiceV2Client._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True def _add_cred_info_for_auth_errors( self, @@ -608,6 +590,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -660,13 +646,28 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.ConfigServiceV2Client`.", + extra = { + "serviceName": "google.logging.v2.ConfigServiceV2", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.logging.v2.ConfigServiceV2", + "credentialsType": None, + } + ) + def list_buckets(self, request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListBucketsPager: r"""Lists log buckets. @@ -721,8 +722,10 @@ def sample_list_buckets(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
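The `client_logging` hook added above emits a structured DEBUG record when a client is constructed. A sketch of turning it on, assuming the installed google-api-core provides the `client_logging` module and reads its `GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable:

    import os

    # Scope SDK debug logging to this library before any client is
    # created; initialize_logging() consults this variable once.
    os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google.cloud.logging_v2"

    from google.cloud import logging_v2

    # Construction now emits the "Created client
    # `google.logging_v2.ConfigServiceV2Client`." record shown in this diff.
    client = logging_v2.ConfigServiceV2Client()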
Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager: @@ -754,7 +757,7 @@ def sample_list_buckets(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_buckets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -792,7 +795,7 @@ def get_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Gets a log bucket. @@ -828,8 +831,10 @@ def sample_get_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -847,7 +852,7 @@ def sample_get_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -874,7 +879,7 @@ def create_bucket_async(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Creates a log bucket asynchronously that can be used to store log entries. @@ -918,8 +923,10 @@ def sample_create_bucket_async(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -940,7 +947,7 @@ def sample_create_bucket_async(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_bucket_async] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -975,7 +982,7 @@ def update_bucket_async(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Updates a log bucket asynchronously. 
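`list_buckets` returns the `ListBucketsPager` named in the `Returns` block above; iterating the pager fetches subsequent pages transparently. A brief usage sketch with a placeholder parent:

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    pager = client.list_buckets(
        parent="projects/my-project/locations/global",
    )
    # Each item is a logging_config.LogBucket; additional pages are
    # requested lazily as iteration proceeds.
    for bucket in pager:
        print(bucket.name)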
@@ -1021,8 +1028,10 @@ def sample_update_bucket_async(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1043,7 +1052,7 @@ def sample_update_bucket_async(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_bucket_async] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1078,7 +1087,7 @@ def create_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's @@ -1117,8 +1126,10 @@ def sample_create_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -1136,7 +1147,7 @@ def sample_create_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1163,7 +1174,7 @@ def update_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Updates a log bucket. @@ -1205,8 +1216,10 @@ def sample_update_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -1224,7 +1237,7 @@ def sample_update_bucket(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.update_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1251,7 +1264,7 @@ def delete_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a log bucket. @@ -1289,8 +1302,10 @@ def sample_delete_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -1302,7 +1317,7 @@ def sample_delete_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1326,7 +1341,7 @@ def undelete_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 @@ -1361,8 +1376,10 @@ def sample_undelete_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -1374,7 +1391,7 @@ def sample_undelete_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.undelete_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1399,7 +1416,7 @@ def list_views(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListViewsPager: r"""Lists views on a log bucket. 
@@ -1446,8 +1463,10 @@ def sample_list_views():
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.

         Returns:
             google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager:
@@ -1479,7 +1498,7 @@ def sample_list_views():
         # and friendly error handling.
         rpc = self._transport._wrapped_methods[self._transport.list_views]

-        # Certain fields should be provided within the metadata header; 
+        # Certain fields should be provided within the metadata header;
         # add these here.
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
@@ -1517,7 +1536,7 @@ def get_view(self,
             *,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> logging_config.LogView:
         r"""Gets a view on a log bucket.
@@ -1553,8 +1572,10 @@ def sample_get_view():
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.

         Returns:
             google.cloud.logging_v2.types.LogView:
@@ -1572,7 +1593,7 @@ def sample_get_view():
         # and friendly error handling.
         rpc = self._transport._wrapped_methods[self._transport.get_view]

-        # Certain fields should be provided within the metadata header; 
+        # Certain fields should be provided within the metadata header;
         # add these here.
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
@@ -1599,7 +1620,7 @@ def create_view(self,
             *,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> logging_config.LogView:
         r"""Creates a view over log entries in a log bucket. A bucket
         may contain a maximum of 30 views.
@@ -1637,8 +1658,10 @@ def sample_create_view():
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.

         Returns:
             google.cloud.logging_v2.types.LogView:
@@ -1656,7 +1679,7 @@ def sample_create_view():
         # and friendly error handling.
         rpc = self._transport._wrapped_methods[self._transport.create_view]

-        # Certain fields should be provided within the metadata header; 
+        # Certain fields should be provided within the metadata header;
         # add these here.
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
@@ -1683,7 +1706,7 @@ def update_view(self,
             *,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> logging_config.LogView:
         r"""Updates a view on a log bucket. This method replaces the
         following fields in the existing view with values from the new
@@ -1723,8 +1746,10 @@ def sample_update_view():
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.

         Returns:
             google.cloud.logging_v2.types.LogView:
@@ -1742,7 +1767,7 @@ def sample_update_view():
         # and friendly error handling.
         rpc = self._transport._wrapped_methods[self._transport.update_view]

-        # Certain fields should be provided within the metadata header; 
+        # Certain fields should be provided within the metadata header;
         # add these here.
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
@@ -1769,7 +1794,7 @@ def delete_view(self,
             *,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> None:
         r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error
         is returned, this indicates that the system is not in a state where it
@@ -1805,8 +1830,10 @@ def sample_delete_view():
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
         """
         # Create or coerce a protobuf request object.
         # - Use the request object if provided (there's no risk of modifying the input as
@@ -1818,7 +1845,7 @@ def sample_delete_view():
         # and friendly error handling.
         rpc = self._transport._wrapped_methods[self._transport.delete_view]

-        # Certain fields should be provided within the metadata header; 
+        # Certain fields should be provided within the metadata header;
         # add these here.
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
@@ -1843,7 +1870,7 @@ def list_sinks(self,
             parent: Optional[str] = None,
             retry: OptionalRetry = gapic_v1.method.DEFAULT,
             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
             ) -> pagers.ListSinksPager:
         r"""Lists sinks.
@@ -1894,8 +1921,10 @@ def sample_list_sinks(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager: @@ -1926,7 +1955,7 @@ def sample_list_sinks(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_sinks] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1965,7 +1994,7 @@ def get_sink(self, sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Gets a sink. @@ -2018,8 +2047,10 @@ def sample_get_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -2055,7 +2086,7 @@ def sample_get_sink(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_sink] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2084,7 +2115,7 @@ def create_sink(self, sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins @@ -2153,8 +2184,10 @@ def sample_create_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -2192,7 +2225,7 @@ def sample_create_sink(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.create_sink] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2222,7 +2255,7 @@ def update_sink(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, @@ -2315,8 +2348,10 @@ def sample_update_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -2356,7 +2391,7 @@ def sample_update_sink(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_sink] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2384,7 +2419,7 @@ def delete_sink(self, sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. @@ -2436,8 +2471,10 @@ def sample_delete_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -2460,7 +2497,7 @@ def sample_delete_sink(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_sink] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
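The repeated "Certain fields should be provided within the metadata header" step mirrors request fields into the `x-goog-request-params` header. A standalone sketch of the helper it uses, with a placeholder sink name:

    from google.api_core import gapic_v1

    metadata = ()
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("sink_name", "projects/my-project/sinks/my-sink"),
        )),
    )
    # Prints: (('x-goog-request-params',
    #           'sink_name=projects%2Fmy-project%2Fsinks%2Fmy-sink'),)
    print(metadata)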
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2487,7 +2524,7 @@ def create_link(self, link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs @@ -2558,8 +2595,10 @@ def sample_create_link(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2595,7 +2634,7 @@ def sample_create_link(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_link] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2631,7 +2670,7 @@ def delete_link(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2683,8 +2722,10 @@ def sample_delete_link(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2723,7 +2764,7 @@ def sample_delete_link(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_link] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2759,7 +2800,7 @@ def list_links(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLinksPager: r"""Lists links. @@ -2808,8 +2849,10 @@ def sample_list_links(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager: @@ -2841,7 +2884,7 @@ def sample_list_links(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_links] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2880,7 +2923,7 @@ def get_link(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Link: r"""Gets a link. @@ -2927,8 +2970,10 @@ def sample_get_link(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Link: @@ -2957,7 +3002,7 @@ def sample_get_link(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_link] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2985,7 +3030,7 @@ def list_exclusions(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -3037,8 +3082,10 @@ def sample_list_exclusions(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: @@ -3069,7 +3116,7 @@ def sample_list_exclusions(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_exclusions] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3108,7 +3155,7 @@ def get_exclusion(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. @@ -3161,8 +3208,10 @@ def sample_get_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -3196,7 +3245,7 @@ def sample_get_exclusion(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_exclusion] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3225,7 +3274,7 @@ def create_exclusion(self, exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can @@ -3295,8 +3344,10 @@ def sample_create_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -3332,7 +3383,7 @@ def sample_create_exclusion(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_exclusion] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3362,7 +3413,7 @@ def update_exclusion(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3443,8 +3494,10 @@ def sample_update_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -3482,7 +3535,7 @@ def sample_update_exclusion(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_exclusion] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3510,7 +3563,7 @@ def delete_exclusion(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an exclusion in the \_Default sink. @@ -3561,8 +3614,10 @@ def sample_delete_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -3585,7 +3640,7 @@ def sample_delete_exclusion(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_exclusion] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3609,7 +3664,7 @@ def get_cmek_settings(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. @@ -3659,8 +3714,10 @@ def sample_get_cmek_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -3688,7 +3745,7 @@ def sample_get_cmek_settings(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.get_cmek_settings] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3715,7 +3772,7 @@ def update_cmek_settings(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. @@ -3770,8 +3827,10 @@ def sample_update_cmek_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -3799,7 +3858,7 @@ def sample_update_cmek_settings(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_cmek_settings] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3827,7 +3886,7 @@ def get_settings(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. @@ -3902,8 +3961,10 @@ def sample_get_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -3933,7 +3994,7 @@ def sample_get_settings(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_settings] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3962,7 +4023,7 @@ def update_settings(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. 
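The recurring docstring and annotation change above widens `metadata` from `Sequence[Tuple[str, str]]` to `Sequence[Tuple[str, Union[str, bytes]]]`, matching gRPC's convention that metadata keys ending in `-bin` carry binary values. A minimal sketch of what the widened signature accepts, assuming the generated package layout; the sink path and the `-bin` key below are hypothetical:

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()

    # Text-valued keys still take `str`; keys with the `-bin` suffix may now
    # pass `bytes` without tripping type checkers.
    sink = client.get_sink(
        sink_name="projects/my-project/sinks/my-sink",  # hypothetical resource
        metadata=[
            ("x-goog-request-reason", "audit"),          # str value
            ("x-debug-trace-bin", b"\x0a\x02\x08\x01"),  # bytes value for a `-bin` key
        ],
    )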
@@ -4044,8 +4105,10 @@ def sample_update_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -4077,7 +4140,7 @@ def sample_update_settings(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_settings] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -4104,7 +4167,7 @@ def copy_log_entries(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -4146,8 +4209,10 @@ def sample_copy_log_entries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -4210,7 +4275,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -4221,8 +4286,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -4235,11 +4302,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. 
@@ -4268,7 +4331,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -4279,8 +4342,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -4293,11 +4358,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -4326,7 +4387,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -4341,8 +4402,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -4354,11 +4417,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 459a76f213..ff7d5d2335 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -51,7 +51,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. 
Args: @@ -64,8 +64,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListBucketsRequest(request) @@ -117,7 +119,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -130,8 +132,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListBucketsRequest(request) @@ -186,7 +190,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -199,8 +203,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListViewsRequest(request) @@ -252,7 +258,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -265,8 +271,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_config.ListViewsRequest(request) @@ -321,7 +329,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -334,8 +342,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListSinksRequest(request) @@ -387,7 +397,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -400,8 +410,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListSinksRequest(request) @@ -456,7 +468,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -469,8 +481,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListLinksRequest(request) @@ -522,7 +536,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -535,8 +549,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListLinksRequest(request) @@ -591,7 +607,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -604,8 +620,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListExclusionsRequest(request) @@ -657,7 +675,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -670,8 +688,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_config.ListExclusionsRequest(request) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 26cbb2b350..6dbea3b483 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -362,6 +362,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 08c0c4707b..02789efdee 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,14 +25,85 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.logging_v2.types import logging_config from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + 
f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport): """gRPC backend transport for ConfigServiceV2. @@ -181,7 +255,10 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -245,7 +322,7 @@ def operations_client(self) -> operations_v1.OperationsClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -270,7 +347,7 @@ def list_buckets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_buckets' not in self._stubs: - self._stubs['list_buckets'] = self.grpc_channel.unary_unary( + self._stubs['list_buckets'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListBuckets', request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, @@ -296,7 +373,7 @@ def get_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_bucket' not in self._stubs: - self._stubs['get_bucket'] = self.grpc_channel.unary_unary( + self._stubs['get_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetBucket', request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -325,7 +402,7 @@ def create_bucket_async(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_bucket_async' not in self._stubs: - self._stubs['create_bucket_async'] = self.grpc_channel.unary_unary( + self._stubs['create_bucket_async'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -357,7 +434,7 @@ def update_bucket_async(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_bucket_async' not in self._stubs: - self._stubs['update_bucket_async'] = self.grpc_channel.unary_unary( + self._stubs['update_bucket_async'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -385,7 +462,7 @@ def create_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_bucket' not in self._stubs: - self._stubs['create_bucket'] = self.grpc_channel.unary_unary( + self._stubs['create_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateBucket', request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -417,7 +494,7 @@ def update_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_bucket' not in self._stubs: - self._stubs['update_bucket'] = self.grpc_channel.unary_unary( + self._stubs['update_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateBucket', request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -448,7 +525,7 @@ def delete_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_bucket' not in self._stubs: - self._stubs['delete_bucket'] = self.grpc_channel.unary_unary( + self._stubs['delete_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteBucket', request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -476,7 +553,7 @@ def undelete_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'undelete_bucket' not in self._stubs: - self._stubs['undelete_bucket'] = self.grpc_channel.unary_unary( + self._stubs['undelete_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UndeleteBucket', request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -502,7 +579,7 @@ def list_views(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_views' not in self._stubs: - self._stubs['list_views'] = self.grpc_channel.unary_unary( + self._stubs['list_views'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListViews', request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, @@ -528,7 +605,7 @@ def get_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_view' not in self._stubs: - self._stubs['get_view'] = self.grpc_channel.unary_unary( + self._stubs['get_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetView', request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -555,7 +632,7 @@ def create_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_view' not in self._stubs: - self._stubs['create_view'] = self.grpc_channel.unary_unary( + self._stubs['create_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateView', request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -585,7 +662,7 @@ def update_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_view' not in self._stubs: - self._stubs['update_view'] = self.grpc_channel.unary_unary( + self._stubs['update_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateView', request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -614,7 +691,7 @@ def delete_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_view' not in self._stubs: - self._stubs['delete_view'] = self.grpc_channel.unary_unary( + self._stubs['delete_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteView', request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -640,7 +717,7 @@ def list_sinks(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_sinks' not in self._stubs: - self._stubs['list_sinks'] = self.grpc_channel.unary_unary( + self._stubs['list_sinks'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListSinks', request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, @@ -666,7 +743,7 @@ def get_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_sink' not in self._stubs: - self._stubs['get_sink'] = self.grpc_channel.unary_unary( + self._stubs['get_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetSink', request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -696,7 +773,7 @@ def create_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_sink' not in self._stubs: - self._stubs['create_sink'] = self.grpc_channel.unary_unary( + self._stubs['create_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateSink', request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -727,7 +804,7 @@ def update_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_sink' not in self._stubs: - self._stubs['update_sink'] = self.grpc_channel.unary_unary( + self._stubs['update_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateSink', request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -754,7 +831,7 @@ def delete_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_sink' not in self._stubs: - self._stubs['delete_sink'] = self.grpc_channel.unary_unary( + self._stubs['delete_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteSink', request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -783,7 +860,7 @@ def create_link(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_link' not in self._stubs: - self._stubs['create_link'] = self.grpc_channel.unary_unary( + self._stubs['create_link'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateLink', request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -810,7 +887,7 @@ def delete_link(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_link' not in self._stubs: - self._stubs['delete_link'] = self.grpc_channel.unary_unary( + self._stubs['delete_link'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteLink', request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -836,7 +913,7 @@ def list_links(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_links' not in self._stubs: - self._stubs['list_links'] = self.grpc_channel.unary_unary( + self._stubs['list_links'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListLinks', request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, @@ -862,7 +939,7 @@ def get_link(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_link' not in self._stubs: - self._stubs['get_link'] = self.grpc_channel.unary_unary( + self._stubs['get_link'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetLink', request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, @@ -889,7 +966,7 @@ def list_exclusions(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_exclusions' not in self._stubs: - self._stubs['list_exclusions'] = self.grpc_channel.unary_unary( + self._stubs['list_exclusions'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListExclusions', request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, @@ -915,7 +992,7 @@ def get_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_exclusion' not in self._stubs: - self._stubs['get_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['get_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetExclusion', request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -943,7 +1020,7 @@ def create_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_exclusion' not in self._stubs: - self._stubs['create_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['create_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateExclusion', request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -970,7 +1047,7 @@ def update_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_exclusion' not in self._stubs: - self._stubs['update_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['update_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateExclusion', request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -996,7 +1073,7 @@ def delete_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_exclusion' not in self._stubs: - self._stubs['delete_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['delete_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteExclusion', request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -1031,7 +1108,7 @@ def get_cmek_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_cmek_settings' not in self._stubs: - self._stubs['get_cmek_settings'] = self.grpc_channel.unary_unary( + self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetCmekSettings', request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -1071,7 +1148,7 @@ def update_cmek_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_cmek_settings' not in self._stubs: - self._stubs['update_cmek_settings'] = self.grpc_channel.unary_unary( + self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -1107,7 +1184,7 @@ def get_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_settings' not in self._stubs: - self._stubs['get_settings'] = self.grpc_channel.unary_unary( + self._stubs['get_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetSettings', request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1150,7 +1227,7 @@ def update_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_settings' not in self._stubs: - self._stubs['update_settings'] = self.grpc_channel.unary_unary( + self._stubs['update_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateSettings', request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1177,7 +1254,7 @@ def copy_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'copy_log_entries' not in self._stubs: - self._stubs['copy_log_entries'] = self.grpc_channel.unary_unary( + self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CopyLogEntries', request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -1185,7 +1262,7 @@ def copy_log_entries(self) -> Callable[ return self._stubs['copy_log_entries'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def cancel_operation( @@ -1198,7 +1275,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -1216,7 +1293,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -1234,7 +1311,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 4b5df55245..78495902e8 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. # import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -24,8 +27,11 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config @@ -34,6 +40,73 @@ from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + 
response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): """gRPC AsyncIO backend transport for ConfigServiceV2. @@ -227,8 +300,11 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -251,7 +327,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -276,7 +352,7 @@ def list_buckets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_buckets' not in self._stubs: - self._stubs['list_buckets'] = self.grpc_channel.unary_unary( + self._stubs['list_buckets'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListBuckets', request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, @@ -302,7 +378,7 @@ def get_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_bucket' not in self._stubs: - self._stubs['get_bucket'] = self.grpc_channel.unary_unary( + self._stubs['get_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetBucket', request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -331,7 +407,7 @@ def create_bucket_async(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_bucket_async' not in self._stubs: - self._stubs['create_bucket_async'] = self.grpc_channel.unary_unary( + self._stubs['create_bucket_async'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -363,7 +439,7 @@ def update_bucket_async(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_bucket_async' not in self._stubs: - self._stubs['update_bucket_async'] = self.grpc_channel.unary_unary( + self._stubs['update_bucket_async'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -391,7 +467,7 @@ def create_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_bucket' not in self._stubs: - self._stubs['create_bucket'] = self.grpc_channel.unary_unary( + self._stubs['create_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateBucket', request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -423,7 +499,7 @@ def update_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_bucket' not in self._stubs: - self._stubs['update_bucket'] = self.grpc_channel.unary_unary( + self._stubs['update_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateBucket', request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -454,7 +530,7 @@ def delete_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_bucket' not in self._stubs: - self._stubs['delete_bucket'] = self.grpc_channel.unary_unary( + self._stubs['delete_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteBucket', request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -482,7 +558,7 @@ def undelete_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'undelete_bucket' not in self._stubs: - self._stubs['undelete_bucket'] = self.grpc_channel.unary_unary( + self._stubs['undelete_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UndeleteBucket', request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -508,7 +584,7 @@ def list_views(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_views' not in self._stubs: - self._stubs['list_views'] = self.grpc_channel.unary_unary( + self._stubs['list_views'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListViews', request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, @@ -534,7 +610,7 @@ def get_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_view' not in self._stubs: - self._stubs['get_view'] = self.grpc_channel.unary_unary( + self._stubs['get_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetView', request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -561,7 +637,7 @@ def create_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_view' not in self._stubs: - self._stubs['create_view'] = self.grpc_channel.unary_unary( + self._stubs['create_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateView', request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -591,7 +667,7 @@ def update_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_view' not in self._stubs: - self._stubs['update_view'] = self.grpc_channel.unary_unary( + self._stubs['update_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateView', request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -620,7 +696,7 @@ def delete_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_view' not in self._stubs: - self._stubs['delete_view'] = self.grpc_channel.unary_unary( + self._stubs['delete_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteView', request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -646,7 +722,7 @@ def list_sinks(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_sinks' not in self._stubs: - self._stubs['list_sinks'] = self.grpc_channel.unary_unary( + self._stubs['list_sinks'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListSinks', request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, @@ -672,7 +748,7 @@ def get_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_sink' not in self._stubs: - self._stubs['get_sink'] = self.grpc_channel.unary_unary( + self._stubs['get_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetSink', request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -702,7 +778,7 @@ def create_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_sink' not in self._stubs: - self._stubs['create_sink'] = self.grpc_channel.unary_unary( + self._stubs['create_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateSink', request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -733,7 +809,7 @@ def update_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_sink' not in self._stubs: - self._stubs['update_sink'] = self.grpc_channel.unary_unary( + self._stubs['update_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateSink', request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -760,7 +836,7 @@ def delete_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'delete_sink' not in self._stubs: - self._stubs['delete_sink'] = self.grpc_channel.unary_unary( + self._stubs['delete_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteSink', request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -789,7 +865,7 @@ def create_link(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_link' not in self._stubs: - self._stubs['create_link'] = self.grpc_channel.unary_unary( + self._stubs['create_link'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateLink', request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -816,7 +892,7 @@ def delete_link(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_link' not in self._stubs: - self._stubs['delete_link'] = self.grpc_channel.unary_unary( + self._stubs['delete_link'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteLink', request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -842,7 +918,7 @@ def list_links(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_links' not in self._stubs: - self._stubs['list_links'] = self.grpc_channel.unary_unary( + self._stubs['list_links'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListLinks', request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, @@ -868,7 +944,7 @@ def get_link(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_link' not in self._stubs: - self._stubs['get_link'] = self.grpc_channel.unary_unary( + self._stubs['get_link'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetLink', request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, @@ -895,7 +971,7 @@ def list_exclusions(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_exclusions' not in self._stubs: - self._stubs['list_exclusions'] = self.grpc_channel.unary_unary( + self._stubs['list_exclusions'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListExclusions', request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, @@ -921,7 +997,7 @@ def get_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_exclusion' not in self._stubs: - self._stubs['get_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['get_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetExclusion', request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -949,7 +1025,7 @@ def create_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_exclusion' not in self._stubs: - self._stubs['create_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['create_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateExclusion', request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -976,7 +1052,7 @@ def update_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_exclusion' not in self._stubs: - self._stubs['update_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['update_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateExclusion', request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -1002,7 +1078,7 @@ def delete_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_exclusion' not in self._stubs: - self._stubs['delete_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['delete_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteExclusion', request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -1037,7 +1113,7 @@ def get_cmek_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_cmek_settings' not in self._stubs: - self._stubs['get_cmek_settings'] = self.grpc_channel.unary_unary( + self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetCmekSettings', request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -1077,7 +1153,7 @@ def update_cmek_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_cmek_settings' not in self._stubs: - self._stubs['update_cmek_settings'] = self.grpc_channel.unary_unary( + self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -1113,7 +1189,7 @@ def get_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_settings' not in self._stubs: - self._stubs['get_settings'] = self.grpc_channel.unary_unary( + self._stubs['get_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetSettings', request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1156,7 +1232,7 @@ def update_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_settings' not in self._stubs: - self._stubs['update_settings'] = self.grpc_channel.unary_unary( + self._stubs['update_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateSettings', request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1183,7 +1259,7 @@ def copy_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'copy_log_entries' not in self._stubs: - self._stubs['copy_log_entries'] = self.grpc_channel.unary_unary( + self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CopyLogEntries', request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -1430,6 +1506,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): @@ -1438,7 +1529,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -1455,7 +1546,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -1473,7 +1564,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -1491,7 +1582,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 4e96eec30d..eb900db237 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union @@ -41,6 +42,13 @@ from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class LoggingServiceV2AsyncClient: """Service for ingesting and querying logs.""" @@ -227,13 +235,27 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.LoggingServiceV2AsyncClient`.", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.logging.v2.LoggingServiceV2", + "credentialsType": None, + } + ) + async def delete_log(self, request: Optional[Union[logging.DeleteLogRequest, dict]] = None, *, log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log @@ -288,8 +310,10 @@ async def sample_delete_log(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have @@ -341,7 +365,7 @@ async def write_log_entries(self, entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is @@ -472,8 +496,10 @@ async def sample_write_log_entries(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.WriteLogEntriesResponse: @@ -530,7 +556,7 @@ async def list_log_entries(self, order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogEntriesAsyncPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. @@ -617,8 +643,10 @@ async def sample_list_log_entries(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager: @@ -684,7 +712,7 @@ async def list_monitored_resource_descriptors(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -722,8 +750,10 @@ async def sample_list_monitored_resource_descriptors(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager: @@ -775,7 +805,7 @@ async def list_logs(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogsAsyncPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are @@ -825,8 +855,10 @@ async def sample_list_logs(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager: @@ -897,7 +929,7 @@ def tail_log_entries(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading @@ -946,8 +978,10 @@ def request_generator(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: AsyncIterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: @@ -978,7 +1012,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -989,8 +1023,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1003,11 +1039,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1032,7 +1064,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1043,8 +1075,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1057,11 +1091,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1086,7 +1116,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1101,8 +1131,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1114,11 +1146,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. 
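[Editor's note, not part of the diff] The hunks in the transport files wire a debug-logging interceptor into every generated gRPC channel and gate it on two conditions: google.api_core must expose client_logging (the CLIENT_LOGGING_SUPPORTED flag) and the transport module's logger must have an effective level of DEBUG, per the _LOGGER.isEnabledFor(std_logging.DEBUG) checks. Below is a minimal sketch of opting in from user code. It assumes the generated client is installed as google-cloud-logging, that the installed google-api-core provides client_logging, and that Application Default Credentials are available; the project ID is a hypothetical placeholder.

import logging

from google.cloud.logging_v2.services.logging_service_v2 import (
    LoggingServiceV2Client,
)

# Attach a root handler, then enable DEBUG on the package logger. The
# transport loggers (for example google.cloud.logging_v2.services
# .logging_service_v2.transports.grpc) inherit the effective level, which
# is exactly what the interceptors' isEnabledFor(DEBUG) checks read.
logging.basicConfig()
logging.getLogger("google.cloud.logging_v2").setLevel(logging.DEBUG)

client = LoggingServiceV2Client()

# Each unary RPC now emits "Sending request for ..." and "Received
# response ..." DEBUG records carrying the structured `extra` payloads
# built in the interceptor classes added by this diff.
client.list_logs(parent="projects/my-project")  # hypothetical project ID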
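[Editor's note, not part of the diff] The other change repeated across these files widens every `metadata` annotation from Sequence[Tuple[str, str]] to Sequence[Tuple[str, Union[str, bytes]]], matching the gRPC convention that keys ending in the "-bin" suffix carry binary values, as the updated docstrings explain. A short sketch of what the widened annotation permits; the log name and metadata keys are hypothetical placeholders.

from google.cloud.logging_v2.services.logging_service_v2 import (
    LoggingServiceV2Client,
)

client = LoggingServiceV2Client()
client.delete_log(
    log_name="projects/my-project/logs/my-log",  # hypothetical resource name
    metadata=[
        # Plain string values behave exactly as before.
        ("x-custom-header", "text-value"),
        # Keys ending in "-bin" may now carry bytes without upsetting type
        # checkers; the previous annotation only admitted str values.
        ("x-custom-trace-bin", b"\x0a\x0b\x0c"),
    ],
)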
diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index db77c53b9b..a8a73e2a84 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -16,6 +16,7 @@ from collections import OrderedDict from http import HTTPStatus import json +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast @@ -38,6 +39,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry @@ -389,33 +398,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -425,9 +407,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - LoggingServiceV2Client._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True def _add_cred_info_for_auth_errors( self, @@ -539,6 +521,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. 
+ client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -591,13 +577,28 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.LoggingServiceV2Client`.", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.logging.v2.LoggingServiceV2", + "credentialsType": None, + } + ) + def delete_log(self, request: Optional[Union[logging.DeleteLogRequest, dict]] = None, *, log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log @@ -652,8 +653,10 @@ def sample_delete_log(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -676,7 +679,7 @@ def sample_delete_log(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_log] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -704,7 +707,7 @@ def write_log_entries(self, entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is @@ -835,8 +838,10 @@ def sample_write_log_entries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.WriteLogEntriesResponse: @@ -891,7 +896,7 @@ def list_log_entries(self, order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogEntriesPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. @@ -978,8 +983,10 @@ def sample_list_log_entries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager: @@ -1044,7 +1051,7 @@ def list_monitored_resource_descriptors(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListMonitoredResourceDescriptorsPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -1082,8 +1089,10 @@ def sample_list_monitored_resource_descriptors(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager: @@ -1135,7 +1144,7 @@ def list_logs(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogsPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are @@ -1185,8 +1194,10 @@ def sample_list_logs(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager: @@ -1218,7 +1229,7 @@ def sample_list_logs(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_logs] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1256,7 +1267,7 @@ def tail_log_entries(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Iterable[logging.TailLogEntriesResponse]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading @@ -1305,8 +1316,10 @@ def request_generator(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: @@ -1350,7 +1363,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1361,8 +1374,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1375,11 +1390,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1408,7 +1419,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1419,8 +1430,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1433,11 +1446,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1466,7 +1475,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1481,8 +1490,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1494,11 +1505,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 13e9963f79..c4f0739ea1 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -53,7 +53,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -66,8 +66,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging.ListLogEntriesRequest(request) @@ -119,7 +121,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -132,8 +134,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListLogEntriesRequest(request) @@ -188,7 +192,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -201,8 +205,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) @@ -254,7 +260,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -267,8 +273,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) @@ -323,7 +331,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -336,8 +344,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListLogsRequest(request) @@ -389,7 +399,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -402,8 +412,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListLogsRequest(request) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 3ec63b6148..ff6d8c8dd2 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -221,6 +221,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=3600.0, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index aff5aad6e1..718d4ac671 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,14 +24,85 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.logging_v2.types import logging from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): """gRPC backend transport for LoggingServiceV2. @@ -179,7 +253,10 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -256,7 +333,7 @@ def delete_log(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self.grpc_channel.unary_unary( + self._stubs['delete_log'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/DeleteLog', request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -288,7 +365,7 @@ def write_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self.grpc_channel.unary_unary( + self._stubs['write_log_entries'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/WriteLogEntries', request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, @@ -317,7 +394,7 @@ def list_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self.grpc_channel.unary_unary( + self._stubs['list_log_entries'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/ListLogEntries', request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, @@ -345,7 +422,7 @@ def list_monitored_resource_descriptors(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( + self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, @@ -373,7 +450,7 @@ def list_logs(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self.grpc_channel.unary_unary( + self._stubs['list_logs'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/ListLogs', request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, @@ -401,7 +478,7 @@ def tail_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
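# --- Editor's illustrative sketch, not part of the diff: the wiring pattern the
# --- generated sync transport now uses. A no-op stand-in interceptor is shown;
# --- the target "localhost:8080" is an assumption for the example.
import grpc

class _SketchInterceptor(grpc.UnaryUnaryClientInterceptor):
    def intercept_unary_unary(self, continuation, client_call_details, request):
        # The generated _LoggingClientInterceptor builds and logs the
        # request/response payloads here when DEBUG logging is enabled.
        return continuation(client_call_details, request)

_channel = grpc.insecure_channel("localhost:8080")  # assumed target
_logged_channel = grpc.intercept_channel(_channel, _SketchInterceptor())
# All stubs are then created from the intercepted channel, e.g.:
_stub = _logged_channel.unary_unary("/google.logging.v2.LoggingServiceV2/ListLogs")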
if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self.grpc_channel.stream_stream( + self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( '/google.logging.v2.LoggingServiceV2/TailLogEntries', request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, @@ -409,7 +486,7 @@ def tail_log_entries(self) -> Callable[ return self._stubs['tail_log_entries'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def cancel_operation( @@ -422,7 +499,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -440,7 +517,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -458,7 +535,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index fa0d3072d4..6933d0f2ba 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -23,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging @@ -33,6 +39,73 @@ from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import LoggingServiceV2GrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): """gRPC AsyncIO backend transport for LoggingServiceV2. @@ -225,8 +298,11 @@ def __init__(self, *, ], ) - # Wrap messages.
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -262,7 +338,7 @@ def delete_log(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self.grpc_channel.unary_unary( + self._stubs['delete_log'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/DeleteLog', request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -294,7 +370,7 @@ def write_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self.grpc_channel.unary_unary( + self._stubs['write_log_entries'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/WriteLogEntries', request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, @@ -323,7 +399,7 @@ def list_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self.grpc_channel.unary_unary( + self._stubs['list_log_entries'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/ListLogEntries', request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, @@ -351,7 +427,7 @@ def list_monitored_resource_descriptors(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( + self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, @@ -379,7 +455,7 @@ def list_logs(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self.grpc_channel.unary_unary( + self._stubs['list_logs'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/ListLogs', request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, @@ -407,7 +483,7 @@ def tail_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
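# --- Editor's illustrative sketch, not part of the diff: how a user opts in to
# --- the DEBUG-level gRPC logging added above. The interceptors only build and
# --- emit payloads when the module logger passes isEnabledFor(DEBUG).
import logging
import os

# Assumed to be read by google.api_core.client_logging.initialize_logging(),
# which the generated clients call at construction time when it is available.
os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google.cloud.logging_v2"

# Plain stdlib configuration works as well (DEBUG on the root logger is noisy;
# scoping to the package logger is usually enough).
logging.basicConfig()
logging.getLogger("google.cloud.logging_v2").setLevel(logging.DEBUG)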
if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self.grpc_channel.stream_stream( + self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( '/google.logging.v2.LoggingServiceV2/TailLogEntries', request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, @@ -513,6 +589,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=3600.0, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): @@ -521,7 +612,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -538,7 +629,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -556,7 +647,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -574,7 +665,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 01d94e8ff2..8f1ec70d9c 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -42,6 +43,13 @@ from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport from .client import MetricsServiceV2Client +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class MetricsServiceV2AsyncClient: """Service for configuring logs-based metrics.""" @@ -228,13 +236,27 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.MetricsServiceV2AsyncClient`.", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.logging.v2.MetricsServiceV2", + "credentialsType": None, + } + ) + async def list_log_metrics(self, request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. @@ -282,8 +304,10 @@ async def sample_list_log_metrics(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager: @@ -355,7 +379,7 @@ async def get_log_metric(self, metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. @@ -401,8 +425,10 @@ async def sample_get_log_metric(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.types.LogMetric: @@ -471,7 +497,7 @@ async def create_log_metric(self, metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -533,8 +559,10 @@ async def sample_create_log_metric(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -605,7 +633,7 @@ async def update_log_metric(self, metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -666,8 +694,10 @@ async def sample_update_log_metric(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -737,7 +767,7 @@ async def delete_log_metric(self, metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a logs-based metric. @@ -780,8 +810,10 @@ async def sample_delete_log_metric(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -830,7 +862,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. 
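# --- Editor's illustrative sketch, not part of the diff: the lookup-table
# --- pattern that replaces per-call wrapping of the operations RPCs above.
# --- Bound methods compare by (function, instance), so they work as dict keys.
from google.api_core import gapic_v1

class _SketchTransport:
    def list_operations(self, request=None):
        ...  # stand-in for the real stub call

    def _prep_wrapped_messages(self, client_info):
        self._wrapped_methods = {
            # key: the bound method; value: the wrapped callable built once
            self.list_operations: gapic_v1.method.wrap_method(
                self.list_operations,
                default_timeout=None,
                client_info=client_info,
            ),
        }

_t = _SketchTransport()
_t._prep_wrapped_messages(gapic_v1.client_info.ClientInfo())
rpc = _t._wrapped_methods[_t.list_operations]  # dict lookup, no re-wrapping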
@@ -841,8 +873,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -855,11 +889,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -884,7 +914,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -895,8 +925,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -909,11 +941,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -938,7 +966,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -953,8 +981,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: None """ @@ -966,11 +996,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index c2aa3c37dd..fa733f32bc 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -16,6 +16,7 @@ from collections import OrderedDict from http import HTTPStatus import json +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast @@ -38,6 +39,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers @@ -390,33 +399,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -426,9 +408,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - MetricsServiceV2Client._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True def _add_cred_info_for_auth_errors( self, @@ -540,6 +522,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -592,13 +578,28 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.MetricsServiceV2Client`.", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.logging.v2.MetricsServiceV2", + "credentialsType": None, + } + ) + def list_log_metrics(self, request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. @@ -646,8 +647,10 @@ def sample_list_log_metrics(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager: @@ -679,7 +682,7 @@ def sample_list_log_metrics(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_log_metrics] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -718,7 +721,7 @@ def get_log_metric(self, metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. @@ -764,8 +767,10 @@ def sample_get_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -804,7 +809,7 @@ def sample_get_log_metric(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_log_metric] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -833,7 +838,7 @@ def create_log_metric(self, metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -895,8 +900,10 @@ def sample_create_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -937,7 +944,7 @@ def sample_create_log_metric(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_log_metric] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -966,7 +973,7 @@ def update_log_metric(self, metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -1027,8 +1034,10 @@ def sample_update_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -1069,7 +1078,7 @@ def sample_update_log_metric(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_log_metric] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1097,7 +1106,7 @@ def delete_log_metric(self, metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a logs-based metric. @@ -1140,8 +1149,10 @@ def sample_delete_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1164,7 +1175,7 @@ def sample_delete_log_metric(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_log_metric] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1202,7 +1213,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1213,8 +1224,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1227,11 +1240,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1260,7 +1269,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1271,8 +1280,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1285,11 +1296,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1318,7 +1325,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1333,8 +1340,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1346,11 +1355,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 134bd0dedb..fc24bb44c9 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -51,7 +51,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -64,8 +64,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) @@ -117,7 +119,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -130,8 +132,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 941b7050f7..3fdfd91333 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -194,6 +194,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 04c8ab0512..f393fd814e 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,14 +24,85 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.logging_v2.types import logging_metrics from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport): """gRPC backend transport for MetricsServiceV2. @@ -179,7 +253,10 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages.
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -252,7 +329,7 @@ def list_log_metrics(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_log_metrics' not in self._stubs: - self._stubs['list_log_metrics'] = self.grpc_channel.unary_unary( + self._stubs['list_log_metrics'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/ListLogMetrics', request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, @@ -278,7 +355,7 @@ def get_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_log_metric' not in self._stubs: - self._stubs['get_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['get_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/GetLogMetric', request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -304,7 +381,7 @@ def create_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_log_metric' not in self._stubs: - self._stubs['create_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['create_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/CreateLogMetric', request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -330,7 +407,7 @@ def update_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_log_metric' not in self._stubs: - self._stubs['update_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['update_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -356,7 +433,7 @@ def delete_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_log_metric' not in self._stubs: - self._stubs['delete_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['delete_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -364,7 +441,7 @@ def delete_log_metric(self) -> Callable[ return self._stubs['delete_log_metric'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def cancel_operation( @@ -377,7 +454,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
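# --- Editor's illustrative sketch, not part of the diff: the lazy stub-cache
# --- idiom used by all of these properties. The stub is created on first
# --- access from the intercepted channel and memoized in self._stubs.
def _get_stub(transport, name, path, request_serializer, response_deserializer):
    if name not in transport._stubs:
        transport._stubs[name] = transport._logged_channel.unary_unary(
            path,
            request_serializer=request_serializer,
            response_deserializer=response_deserializer,
        )
    return transport._stubs[name]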
if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -395,7 +472,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -413,7 +490,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 91fb8fab4f..b382f48506 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -23,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_metrics @@ -33,6 +39,73 @@ from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import MetricsServiceV2GrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): """gRPC AsyncIO backend transport for MetricsServiceV2. @@ -225,8 +298,11 @@ def __init__(self, *, ], ) - # Wrap messages.
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -258,7 +334,7 @@ def list_log_metrics(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_log_metrics' not in self._stubs: - self._stubs['list_log_metrics'] = self.grpc_channel.unary_unary( + self._stubs['list_log_metrics'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/ListLogMetrics', request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, @@ -284,7 +360,7 @@ def get_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_log_metric' not in self._stubs: - self._stubs['get_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['get_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/GetLogMetric', request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -310,7 +386,7 @@ def create_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_log_metric' not in self._stubs: - self._stubs['create_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['create_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/CreateLogMetric', request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -336,7 +412,7 @@ def update_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_log_metric' not in self._stubs: - self._stubs['update_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['update_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -362,7 +438,7 @@ def delete_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
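The interceptor wired up above is inert unless two conditions hold: the installed google-api-core exposes client_logging (CLIENT_LOGGING_SUPPORTED) and the transport module's logger is enabled for DEBUG. A sketch of how an application could opt in using only the standard library (the logger name follows from the module's __name__, i.e. the file path shown in the diff header):

    import logging

    logging.basicConfig()  # install a stderr handler on the root logger
    logging.getLogger(
        "google.cloud.logging_v2.services.metrics_service_v2.transports.grpc_asyncio"
    ).setLevel(logging.DEBUG)

For comparison, grpc.aio channels ordinarily accept interceptors at creation time (for example grpc.aio.insecure_channel(target, interceptors=[...])); the generated __init__ instead appends to the already-built channel's private _unary_unary_interceptors list, because the channel exists before the interceptor does.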
if 'delete_log_metric' not in self._stubs: - self._stubs['delete_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['delete_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -441,6 +517,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): @@ -449,7 +540,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -466,7 +557,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -484,7 +575,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -502,7 +593,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
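The _wrap_method body is elided by the hunk context above; it pairs with the _wrap_with_kind probe set in __init__, so the newer kind keyword reaches gapic_v1.method_async.wrap_method only when the installed google-api-core accepts it. A sketch of that signature-probing idiom (accepts_kwarg and wrap_compat are illustrative names, not generated API):

    import inspect

    def accepts_kwarg(func, name):
        """Return True if func declares a parameter called name."""
        return name in inspect.signature(func).parameters

    def wrap_compat(wrap, func, kind, *args, **kwargs):
        # Forward `kind` only to wrap_method implementations that understand it.
        if accepts_kwarg(wrap, "kind"):
            kwargs["kind"] = kind
        return wrap(func, *args, **kwargs)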
if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/tests/integration/goldens/logging/noxfile.py b/tests/integration/goldens/logging/noxfile.py index 6402e148ff..8d723fa938 100755 --- a/tests/integration/goldens/logging/noxfile.py +++ b/tests/integration/goldens/logging/noxfile.py @@ -29,7 +29,8 @@ "3.9", "3.10", "3.11", - "3.12" + "3.12", + "3.13", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -39,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" +DEFAULT_PYTHON_VERSION = "3.13" nox.sessions = [ "unit", @@ -61,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -94,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -129,7 +130,8 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - "grpcio", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", "grpcio-status", "protobuf", "proto-plus", diff --git a/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index b62675ba64..50c444f70b 100755 --- a/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -43,7 +43,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -119,7 +119,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -196,7 +196,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -272,7 +272,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -349,7 +349,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": 
"google.cloud.logging_v2.types.LogBucket", @@ -425,7 +425,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -510,7 +510,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -594,7 +594,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -683,7 +683,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -771,7 +771,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -856,7 +856,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -940,7 +940,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -1017,7 +1017,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -1093,7 +1093,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -1170,7 +1170,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_bucket" @@ -1243,7 +1243,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_bucket" @@ -1321,7 +1321,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_exclusion" @@ -1398,7 +1398,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_exclusion" @@ -1476,7 +1476,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -1556,7 +1556,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -1637,7 +1637,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_sink" @@ -1714,7 +1714,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_sink" @@ -1788,7 +1788,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_view" @@ -1861,7 +1861,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": 
"delete_view" @@ -1935,7 +1935,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -2011,7 +2011,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -2088,7 +2088,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -2164,7 +2164,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -2245,7 +2245,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -2325,7 +2325,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -2406,7 +2406,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Link", @@ -2486,7 +2486,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Link", @@ -2567,7 +2567,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -2647,7 +2647,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -2728,7 +2728,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -2808,7 +2808,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -2885,7 +2885,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -2961,7 +2961,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -3042,7 +3042,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", @@ -3122,7 +3122,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", @@ -3203,7 +3203,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", @@ -3283,7 +3283,7 @@ }, { "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", @@ -3364,7 +3364,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager", @@ -3444,7 +3444,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager", @@ -3525,7 +3525,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager", @@ -3605,7 +3605,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager", @@ -3686,7 +3686,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager", @@ -3766,7 +3766,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager", @@ -3843,7 +3843,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "undelete_bucket" @@ -3916,7 +3916,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "undelete_bucket" @@ -3990,7 +3990,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -4066,7 +4066,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -4143,7 +4143,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -4219,7 +4219,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -4296,7 +4296,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -4372,7 +4372,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -4461,7 +4461,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -4549,7 +4549,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -4634,7 +4634,7 @@ }, { 
"name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -4718,7 +4718,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -4807,7 +4807,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -4895,7 +4895,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -4972,7 +4972,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -5048,7 +5048,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -5129,7 +5129,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log" @@ -5206,7 +5206,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log" @@ -5292,7 +5292,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager", @@ -5380,7 +5380,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager", @@ -5461,7 +5461,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager", @@ -5541,7 +5541,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager", @@ -5618,7 +5618,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager", @@ -5694,7 +5694,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager", @@ -5771,7 +5771,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", @@ -5847,7 +5847,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", @@ -5940,7 +5940,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": 
"google.cloud.logging_v2.types.WriteLogEntriesResponse", @@ -6032,7 +6032,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", @@ -6117,7 +6117,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6201,7 +6201,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6282,7 +6282,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log_metric" @@ -6359,7 +6359,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log_metric" @@ -6437,7 +6437,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6517,7 +6517,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6598,7 +6598,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager", @@ -6678,7 +6678,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager", @@ -6763,7 +6763,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6847,7 +6847,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", diff --git a/tests/integration/goldens/logging/setup.py b/tests/integration/goldens/logging/setup.py index 8bfa182432..69fe4db623 100755 --- a/tests/integration/goldens/logging/setup.py +++ b/tests/integration/goldens/logging/setup.py @@ -44,8 +44,11 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] +extras = { +} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-logging" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -81,6 +84,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -88,6 +92,7 @@ packages=packages, python_requires=">=3.7", install_requires=dependencies, + extras_require=extras, include_package_data=True, zip_safe=False, ) diff --git a/tests/integration/goldens/logging/testing/constraints-3.13.txt 
b/tests/integration/goldens/logging/testing/constraints-3.13.txt new file mode 100755 index 0000000000..ed7f9aed25 --- /dev/null +++ b/tests/integration/goldens/logging/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 29f8335666..3b94a004c9 100755 --- a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -62,6 +62,7 @@ import google.auth + CRED_INFO_JSON = { "credential_source": "/path/to/file", "credential_type": "service account credentials", @@ -69,6 +70,12 @@ } CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data), chunk_size): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -185,6 +192,7 @@ def test__get_universe_domain(): ConfigServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." + @pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ (401, CRED_INFO_JSON, True), (403, CRED_INFO_JSON, True), @@ -841,25 +849,6 @@ def test_list_buckets(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_buckets_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_buckets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() - - def test_list_buckets_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -921,29 +910,6 @@ def test_list_buckets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_buckets_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - # Designate an appropriate return value for the call.
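The mock_async_gen helper added above walks its input in chunk_size steps and yields each chunk UTF-8 encoded, which is handy for faking streamed responses. A quick usage sketch, illustrative rather than part of the golden tests:

    import asyncio

    async def _demo():
        # With chunk_size=2 the helper yields b"ab", b"cd", b"ef".
        chunks = [chunk async for chunk in mock_async_gen("abcdef", chunk_size=2)]
        assert b"".join(chunks) == b"abcdef"

    asyncio.run(_demo())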
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', - )) - await client.list_buckets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() - - @pytest.mark.asyncio async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1014,7 +980,6 @@ async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type= async def test_list_buckets_async_from_dict(): await test_list_buckets_async(request_type=dict) - def test_list_buckets_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1403,25 +1368,6 @@ def test_get_bucket(request_type, transport: str = 'grpc'): assert response.restricted_fields == ['restricted_fields_value'] -def test_get_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() - - def test_get_bucket_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1481,35 +1427,6 @@ def test_get_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_bucket_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. 
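Several packaging changes in this patch lean on PEP 440 specifiers and PEP 508 environment markers: the noxfile pins grpcio!=1.67.0rc1, setup.py gates a stricter proto-plus floor on python_version >= '3.13', and the new constraints-3.13.txt lists bare (unpinned) dependencies for the 3.13 unit-test session. A sketch of how those two expression kinds evaluate, assuming the packaging library is installed:

    from packaging.markers import Marker
    from packaging.specifiers import SpecifierSet

    # The noxfile pin admits every grpcio release except the one pre-release
    # that lacked Python 3.13 support.
    spec = SpecifierSet("!=1.67.0rc1", prereleases=True)
    assert "1.67.0" in spec
    assert "1.67.0rc1" not in spec

    # The setup.py marker turns the stricter proto-plus floor on only for 3.13+.
    marker = Marker("python_version >= '3.13'")
    print(marker.evaluate())  # True when running under Python 3.13 or newer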
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) - await client.get_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() - - @pytest.mark.asyncio async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1592,7 +1509,6 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo async def test_get_bucket_async_from_dict(): await test_get_bucket_async(request_type=dict) - def test_get_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1688,25 +1604,6 @@ def test_create_bucket_async(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_create_bucket_async_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_bucket_async() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() - - def test_create_bucket_async_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1773,29 +1670,6 @@ def test_create_bucket_async_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_bucket_async_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_bucket_async() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() - - @pytest.mark.asyncio async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1870,7 +1744,6 @@ async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', reques async def test_create_bucket_async_async_from_dict(): await test_create_bucket_async_async(request_type=dict) - def test_create_bucket_async_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1966,25 +1839,6 @@ def test_update_bucket_async(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_update_bucket_async_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_bucket_async() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() - - def test_update_bucket_async_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2049,29 +1903,6 @@ def test_update_bucket_async_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_bucket_async_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_bucket_async() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() - - @pytest.mark.asyncio async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2146,7 +1977,6 @@ async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', reques async def test_update_bucket_async_async_from_dict(): await test_update_bucket_async_async(request_type=dict) - def test_update_bucket_async_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2257,25 +2087,6 @@ def test_create_bucket(request_type, transport: str = 'grpc'): assert response.restricted_fields == ['restricted_fields_value'] -def test_create_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() - - def test_create_bucket_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2337,35 +2148,6 @@ def test_create_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_bucket_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) - await client.create_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() - - @pytest.mark.asyncio async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2448,7 +2230,6 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type async def test_create_bucket_async_from_dict(): await test_create_bucket_async(request_type=dict) - def test_create_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2559,25 +2340,6 @@ def test_update_bucket(request_type, transport: str = 'grpc'): assert response.restricted_fields == ['restricted_fields_value'] -def test_update_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() - - def test_update_bucket_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2637,35 +2399,6 @@ def test_update_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_bucket_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) - await client.update_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() - - @pytest.mark.asyncio async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2748,7 +2481,6 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type async def test_update_bucket_async_from_dict(): await test_update_bucket_async(request_type=dict) - def test_update_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2844,25 +2576,6 @@ def test_delete_bucket(request_type, transport: str = 'grpc'): assert response is None -def test_delete_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() - - def test_delete_bucket_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2922,27 +2635,6 @@ def test_delete_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_bucket_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() - - @pytest.mark.asyncio async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3010,7 +2702,6 @@ async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type async def test_delete_bucket_async_from_dict(): await test_delete_bucket_async(request_type=dict) - def test_delete_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3106,28 +2797,9 @@ def test_undelete_bucket(request_type, transport: str = 'grpc'): assert response is None -def test_undelete_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.undelete_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() - - -def test_undelete_bucket_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. +def test_undelete_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport='grpc', @@ -3184,27 +2856,6 @@ def test_undelete_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_undelete_bucket_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.undelete_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() - - @pytest.mark.asyncio async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3272,7 +2923,6 @@ async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_ty async def test_undelete_bucket_async_from_dict(): await test_undelete_bucket_async(request_type=dict) - def test_undelete_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3371,25 +3021,6 @@ def test_list_views(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_views_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_views() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() - - def test_list_views_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3451,29 +3082,6 @@ def test_list_views_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_views_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( - next_page_token='next_page_token_value', - )) - await client.list_views() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() - - @pytest.mark.asyncio async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3544,7 +3152,6 @@ async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=lo async def test_list_views_async_from_dict(): await test_list_views_async(request_type=dict) - def test_list_views_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3925,25 +3532,6 @@ def test_get_view(request_type, transport: str = 'grpc'): assert response.filter == 'filter_value' -def test_get_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() - - def test_get_view_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4003,31 +3591,6 @@ def test_get_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_view_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) - await client.get_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() - - @pytest.mark.asyncio async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4102,7 +3665,6 @@ async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logg async def test_get_view_async_from_dict(): await test_get_view_async(request_type=dict) - def test_get_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4205,25 +3767,6 @@ def test_create_view(request_type, transport: str = 'grpc'): assert response.filter == 'filter_value' -def test_create_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() - - def test_create_view_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4285,31 +3828,6 @@ def test_create_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_view_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) - await client.create_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() - - @pytest.mark.asyncio async def test_create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4384,7 +3902,6 @@ async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=l async def test_create_view_async_from_dict(): await test_create_view_async(request_type=dict) - def test_create_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4487,25 +4004,6 @@ def test_update_view(request_type, transport: str = 'grpc'): assert response.filter == 'filter_value' -def test_update_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() - - def test_update_view_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4565,31 +4063,6 @@ def test_update_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_view_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) - await client.update_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() - - @pytest.mark.asyncio async def test_update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4664,7 +4137,6 @@ async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=l async def test_update_view_async_from_dict(): await test_update_view_async(request_type=dict) - def test_update_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4760,25 +4232,6 @@ def test_delete_view(request_type, transport: str = 'grpc'): assert response is None -def test_delete_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() - - def test_delete_view_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4838,27 +4291,6 @@ def test_delete_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_view_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() - - @pytest.mark.asyncio async def test_delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4926,7 +4358,6 @@ async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=l async def test_delete_view_async_from_dict(): await test_delete_view_async(request_type=dict) - def test_delete_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -5025,25 +4456,6 @@ def test_list_sinks(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_sinks_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
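The surviving `*_non_empty_request_with_auto_populated_field` tests guard the AIP-4235 behavior: values the caller sets explicitly must not be overwritten by any field the client is allowed to auto-populate. For a method like `get_view` the check effectively asserts that a populated request reaches the stub unchanged, as in this sketch (same import assumptions as above):

```python
# Sketch of the surviving AIP-4235 coverage check: explicitly set fields must
# survive the round trip to the stub unchanged.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config


def test_get_view_non_empty_request_sketch():
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    request = logging_config.GetViewRequest(name='name_value')

    with mock.patch.object(type(client.transport.get_view), '__call__') as call:
        call.return_value = logging_config.LogView()
        client.get_view(request=request)

    # The populated field was not clobbered by auto-population.
    _, args, _ = call.mock_calls[0]
    assert args[0] == logging_config.GetViewRequest(name='name_value')
```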
- client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_sinks() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() - - def test_list_sinks_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5105,29 +4517,6 @@ def test_list_sinks_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_sinks_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( - next_page_token='next_page_token_value', - )) - await client.list_sinks() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() - - @pytest.mark.asyncio async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5198,7 +4587,6 @@ async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=lo async def test_list_sinks_async_from_dict(): await test_list_sinks_async(request_type=dict) - def test_list_sinks_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -5589,25 +4977,6 @@ def test_get_sink(request_type, transport: str = 'grpc'): assert response.include_children is True -def test_get_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() - - def test_get_sink_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5667,36 +5036,6 @@ def test_get_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_sink_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) - await client.get_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() - - @pytest.mark.asyncio async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5781,7 +5120,6 @@ async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logg async def test_get_sink_async_from_dict(): await test_get_sink_async(request_type=dict) - def test_get_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -5976,25 +5314,6 @@ def test_create_sink(request_type, transport: str = 'grpc'): assert response.include_children is True -def test_create_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() - - def test_create_sink_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6054,36 +5373,6 @@ def test_create_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_sink_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - # Designate an appropriate return value for the call. 
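The removed async variants all stub the transport with `grpc_helpers_async.FakeUnaryUnaryCall`, which wraps a plain response message in an awaitable so the mock behaves like a real unary-unary gRPC call. A self-contained sketch of the idiom (assumes `pytest-asyncio`; plain anonymous credentials stand in for the file's `async_anonymous_credentials()` helper):

```python
# Sketch of the async stubbing idiom: FakeUnaryUnaryCall makes a mocked stub
# awaitable, like a real unary-unary call. Assumes pytest-asyncio is installed.
from unittest import mock

import pytest
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2AsyncClient
from google.cloud.logging_v2.types import logging_config


@pytest.mark.asyncio
async def test_get_sink_async_stub_sketch():
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc_asyncio',
    )

    with mock.patch.object(type(client.transport.get_sink), '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            logging_config.LogSink(name='name_value')
        )
        response = await client.get_sink(request=logging_config.GetSinkRequest())

    # Awaiting the fake call yields the wrapped message.
    assert response.name == 'name_value'
```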
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) - await client.create_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() - - @pytest.mark.asyncio async def test_create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6168,7 +5457,6 @@ async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=l async def test_create_sink_async_from_dict(): await test_create_sink_async(request_type=dict) - def test_create_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -6373,25 +5661,6 @@ def test_update_sink(request_type, transport: str = 'grpc'): assert response.include_children is True -def test_update_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() - - def test_update_sink_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6451,36 +5720,6 @@ def test_update_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_sink_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) - await client.update_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() - - @pytest.mark.asyncio async def test_update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6565,7 +5804,6 @@ async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=l async def test_update_sink_async_from_dict(): await test_update_sink_async(request_type=dict) - def test_update_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -6763,25 +6001,6 @@ def test_delete_sink(request_type, transport: str = 'grpc'): assert response is None -def test_delete_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() - - def test_delete_sink_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6841,27 +6060,6 @@ def test_delete_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_sink_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() - - @pytest.mark.asyncio async def test_delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6929,7 +6127,6 @@ async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=l async def test_delete_sink_async_from_dict(): await test_delete_sink_async(request_type=dict) - def test_delete_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -7107,25 +6304,6 @@ def test_create_link(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_create_link_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() - - def test_create_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7192,29 +6370,6 @@ def test_create_link_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_link_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() - - @pytest.mark.asyncio async def test_create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7289,7 +6444,6 @@ async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=l async def test_create_link_async_from_dict(): await test_create_link_async(request_type=dict) - def test_create_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -7489,25 +6643,6 @@ def test_delete_link(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_delete_link_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
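`create_link`, `delete_link`, and `copy_log_entries` are long-running operations, so their tests fake the stub with a raw `operations_pb2.Operation` and only assert that the client hands back a `google.api_core` future; resolving the future would require a live server. A condensed sketch under the same import assumptions:

```python
# Sketch of the long-running-operation mock: the stub returns a raw Operation
# proto and the GAPIC layer wraps it in a google.api_core future.
from unittest import mock

from google.api_core import future
from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config
from google.longrunning import operations_pb2


def test_create_link_lro_sketch():
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    with mock.patch.object(type(client.transport.create_link), '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.create_link(request=logging_config.CreateLinkRequest())

    # The client returns a future, not the raw Operation proto.
    assert isinstance(response, future.Future)
```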
- client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteLinkRequest() - - def test_delete_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7572,29 +6707,6 @@ def test_delete_link_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_link_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteLinkRequest() - - @pytest.mark.asyncio async def test_delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7669,7 +6781,6 @@ async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=l async def test_delete_link_async_from_dict(): await test_delete_link_async(request_type=dict) - def test_delete_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -7852,25 +6963,6 @@ def test_list_links(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_links_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_links() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListLinksRequest() - - def test_list_links_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7932,29 +7024,6 @@ def test_list_links_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_links_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( - next_page_token='next_page_token_value', - )) - await client.list_links() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListLinksRequest() - - @pytest.mark.asyncio async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8025,7 +7094,6 @@ async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=lo async def test_list_links_async_from_dict(): await test_list_links_async(request_type=dict) - def test_list_links_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -8406,25 +7474,6 @@ def test_get_link(request_type, transport: str = 'grpc'): assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_get_link_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetLinkRequest() - - def test_get_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8485,39 +7534,14 @@ def test_get_link_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_link_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( - name='name_value', - description='description_value', - lifecycle_state=logging_config.LifecycleState.ACTIVE, - )) - await client.get_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetLinkRequest() - - -@pytest.mark.asyncio -async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) +async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -8583,7 +7607,6 @@ async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logg async def test_get_link_async_from_dict(): await test_get_link_async(request_type=dict) - def test_get_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -8764,25 +7787,6 @@ def test_list_exclusions(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_exclusions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_exclusions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() - - def test_list_exclusions_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8844,29 +7848,6 @@ def test_list_exclusions_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_exclusions_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - # Designate an appropriate return value for the call. 
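The `*_field_headers` tests referenced throughout this hunk pin down routing metadata: the resource name carried in the request must be echoed into the `x-goog-request-params` header. Sketched for `get_link`:

```python
# Sketch of the *_field_headers check: the request's resource name is echoed
# into x-goog-request-params routing metadata.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config


def test_get_link_field_headers_sketch():
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    request = logging_config.GetLinkRequest(name='name_value')

    with mock.patch.object(type(client.transport.get_link), '__call__') as call:
        call.return_value = logging_config.Link()
        client.get_link(request)

    # The routing header carries the field path and value from the request.
    _, _, kw = call.mock_calls[0]
    assert ('x-goog-request-params', 'name=name_value') in kw['metadata']
```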
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_exclusions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() - - @pytest.mark.asyncio async def test_list_exclusions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8937,7 +7918,6 @@ async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_ty async def test_list_exclusions_async_from_dict(): await test_list_exclusions_async(request_type=dict) - def test_list_exclusions_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -9320,25 +8300,6 @@ def test_get_exclusion(request_type, transport: str = 'grpc'): assert response.disabled is True -def test_get_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() - - def test_get_exclusion_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -9398,32 +8359,6 @@ def test_get_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_exclusion_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) - await client.get_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() - - @pytest.mark.asyncio async def test_get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9500,7 +8435,6 @@ async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type async def test_get_exclusion_async_from_dict(): await test_get_exclusion_async(request_type=dict) - def test_get_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -9687,25 +8621,6 @@ def test_create_exclusion(request_type, transport: str = 'grpc'): assert response.disabled is True -def test_create_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() - - def test_create_exclusion_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -9765,32 +8680,6 @@ def test_create_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_exclusion_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) - await client.create_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() - - @pytest.mark.asyncio async def test_create_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9867,7 +8756,6 @@ async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_t async def test_create_exclusion_async_from_dict(): await test_create_exclusion_async(request_type=dict) - def test_create_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -10064,25 +8952,6 @@ def test_update_exclusion(request_type, transport: str = 'grpc'): assert response.disabled is True -def test_update_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() - - def test_update_exclusion_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -10142,32 +9011,6 @@ def test_update_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_exclusion_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) - await client.update_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() - - @pytest.mark.asyncio async def test_update_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10244,7 +9087,6 @@ async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_t async def test_update_exclusion_async_from_dict(): await test_update_exclusion_async(request_type=dict) - def test_update_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -10442,25 +9284,6 @@ def test_delete_exclusion(request_type, transport: str = 'grpc'): assert response is None -def test_delete_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() - - def test_delete_exclusion_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -10520,27 +9343,6 @@ def test_delete_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_exclusion_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() - - @pytest.mark.asyncio async def test_delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10608,7 +9410,6 @@ async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_t async def test_delete_exclusion_async_from_dict(): await test_delete_exclusion_async(request_type=dict) - def test_delete_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -10795,25 +9596,6 @@ def test_get_cmek_settings(request_type, transport: str = 'grpc'): assert response.service_account_id == 'service_account_id_value' -def test_get_cmek_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() - - def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -10873,32 +9655,6 @@ def test_get_cmek_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_cmek_settings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. 
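The `*_use_cached_wrapped_rpc` tests that stand in for the deleted empty-call coverage assert that `wrap_method` fires once, during `_prep_wrapped_messages` at client construction, rather than on every call. The sync variant reduces to this sketch (same import assumptions as the earlier blocks; `_transport._wrapped_methods` is the private cache the generated tests themselves poke):

```python
# Sketch of the sync cached-wrapped-RPC check: wrap_method runs at
# construction time only, never per call.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client


def test_cached_wrapped_rpc_sketch():
    with mock.patch('google.api_core.gapic_v1.method.wrap_method') as wrapper_fn:
        client = ConfigServiceV2Client(
            credentials=ga_credentials.AnonymousCredentials(),
            transport='grpc',
        )

        # Every method was wrapped once, up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Swap the cached wrapper for a mock and call twice.
        mock_rpc = mock.Mock(return_value=mock.Mock())
        client._transport._wrapped_methods[client._transport.get_cmek_settings] = mock_rpc
        client.get_cmek_settings({})
        client.get_cmek_settings({})

        # No new wrapper was built for either call.
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
```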
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', - )) - await client.get_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() - - @pytest.mark.asyncio async def test_get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10975,7 +9731,6 @@ async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_ async def test_get_cmek_settings_async_from_dict(): await test_get_cmek_settings_async(request_type=dict) - def test_get_cmek_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -11080,25 +9835,6 @@ def test_update_cmek_settings(request_type, transport: str = 'grpc'): assert response.service_account_id == 'service_account_id_value' -def test_update_cmek_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() - - def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -11158,32 +9894,6 @@ def test_update_cmek_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_cmek_settings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', - )) - await client.update_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() - - @pytest.mark.asyncio async def test_update_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11260,7 +9970,6 @@ async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', reque async def test_update_cmek_settings_async_from_dict(): await test_update_cmek_settings_async(request_type=dict) - def test_update_cmek_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -11367,25 +10076,6 @@ def test_get_settings(request_type, transport: str = 'grpc'): assert response.disable_default_sink is True -def test_get_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() - - def test_get_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -11445,33 +10135,6 @@ def test_get_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_settings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', - disable_default_sink=True, - )) - await client.get_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() - - @pytest.mark.asyncio async def test_get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11550,7 +10213,6 @@ async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type= async def test_get_settings_async_from_dict(): await test_get_settings_async(request_type=dict) - def test_get_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -11739,25 +10401,6 @@ def test_update_settings(request_type, transport: str = 'grpc'): assert response.disable_default_sink is True -def test_update_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() - - def test_update_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -11817,33 +10460,6 @@ def test_update_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_settings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', - disable_default_sink=True, - )) - await client.update_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() - - @pytest.mark.asyncio async def test_update_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11922,7 +10538,6 @@ async def test_update_settings_async(transport: str = 'grpc_asyncio', request_ty async def test_update_settings_async_from_dict(): await test_update_settings_async(request_type=dict) - def test_update_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -12110,25 +10725,6 @@ def test_copy_log_entries(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_copy_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.copy_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() - - def test_copy_log_entries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -12191,208 +10787,1861 @@ def test_copy_log_entries_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.copy_log_entries(request) + client.copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.copy_log_entries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.copy_log_entries] = mock_rpc + + request = {} + await client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.CopyLogEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_copy_log_entries_async_from_dict(): + await test_copy_log_entries_async(request_type=dict) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
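The mutual-exclusion cases in `test_credentials_transport_error` above imply the one valid API-key path: supplying `api_key` through `client_options` alone, with neither explicit credentials nor a transport instance. A hedged sketch of that usage (this exact snippet is not part of the generated file):

```python
# Hedged sketch, not generated code: an API key is accepted on its own via
# client_options, just not combined with credentials or a transport instance.
from google.api_core import client_options
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

options = client_options.ClientOptions(api_key="api_key")
client = ConfigServiceV2Client(client_options=options)
assert client is not None
```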
+ transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ConfigServiceV2Client(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConfigServiceV2GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = ConfigServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_buckets_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + call.return_value = logging_config.ListBucketsResponse() + client.list_buckets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListBucketsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + call.return_value = logging_config.LogBucket() + client.get_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_bucket_async_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_bucket_async(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_bucket_async_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_bucket_async(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket), + '__call__') as call: + call.return_value = logging_config.LogBucket() + client.create_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket), + '__call__') as call: + call.return_value = logging_config.LogBucket() + client.update_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + call.return_value = None + client.delete_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_undelete_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + call.return_value = None + client.undelete_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UndeleteBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_views_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_views), + '__call__') as call: + call.return_value = logging_config.ListViewsResponse() + client.list_views(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListViewsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client.get_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client.create_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client.update_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + call.return_value = None + client.delete_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sinks_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + call.return_value = logging_config.ListSinksResponse() + client.list_sinks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListSinksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_sink_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client.get_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_sink_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client.create_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_sink_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client.update_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_sink_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + call.return_value = None + client.delete_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_link_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_link), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_link_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_link), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_links_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__') as call: + call.return_value = logging_config.ListLinksResponse() + client.list_links(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListLinksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_link_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_link), + '__call__') as call: + call.return_value = logging_config.Link() + client.get_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_list_exclusions_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + call.return_value = logging_config.ListExclusionsResponse() + client.list_exclusions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListExclusionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client.get_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client.create_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client.update_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + call.return_value = None + client.delete_exclusion(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_cmek_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + call.return_value = logging_config.CmekSettings() + client.get_cmek_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetCmekSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_cmek_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + call.return_value = logging_config.CmekSettings() + client.update_cmek_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateCmekSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + call.return_value = logging_config.Settings() + client.get_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + call.return_value = logging_config.Settings() + client.update_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_copy_log_entries_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.copy_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CopyLogEntriesRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ConfigServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_buckets_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( + next_page_token='next_page_token_value', + )) + await client.list_buckets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListBucketsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], + )) + await client.get_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_bucket_async_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_bucket_async(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_bucket_async_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_bucket_async(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], + )) + await client.create_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], + )) + await client.update_bucket(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_undelete_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.undelete_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UndeleteBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_views_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_views), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( + next_page_token='next_page_token_value', + )) + await client.list_views(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListViewsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + await client.get_view(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + await client.create_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + await client.update_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_sinks_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( + next_page_token='next_page_token_value', + )) + await client.list_sinks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListSinksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + await client.get_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + await client.create_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + await client.update_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_link_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_link_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_links_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( + next_page_token='next_page_token_value', + )) + await client.list_links(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListLinksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_link_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( + name='name_value', + description='description_value', + lifecycle_state=logging_config.LifecycleState.ACTIVE, + )) + await client.get_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_exclusions_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_exclusions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListExclusionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + await client.get_exclusion(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + await client.create_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + await client.update_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateExclusionRequest() + + assert args[0] == request_msg - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_copy_log_entries_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +async def test_delete_exclusion_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), - transport='grpc_asyncio', + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.copy_log_entries), + type(client.transport.delete_exclusion), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.copy_log_entries() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_exclusion(request=None) + + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() - - -@pytest.mark.asyncio -async def test_copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + request_msg = logging_config.DeleteExclusionRequest() - # Ensure method has been cached - assert client._client._transport.copy_log_entries in client._client._transport._wrapped_methods + assert args[0] == request_msg - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.copy_log_entries] = mock_rpc - request = {} - await client.copy_log_entries(request) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_cmek_settings_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + )) + await client.get_cmek_settings(request=None) - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetCmekSettingsRequest() - await client.copy_log_entries(request) + assert args[0] == request_msg - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): +async def test_update_cmek_settings_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), - transport=transport, + transport="grpc_asyncio", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.copy_log_entries), + type(client.transport.update_cmek_settings), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.copy_log_entries(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + )) + await client.update_cmek_settings(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - request = logging_config.CopyLogEntriesRequest() - assert args[0] == request + request_msg = logging_config.UpdateCmekSettingsRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_copy_log_entries_async_from_dict(): - await test_copy_log_entries_async(request_type=dict) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), +async def test_get_settings_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - # It is an error to provide an api_key and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options=options, - transport=transport, - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + await client.get_settings(request=None) - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetSettingsRequest() - # It is an error to provide scopes and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + assert args[0] == request_msg -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_settings_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - client = ConfigServiceV2Client(transport=transport) - assert client.transport is transport -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + await client.update_settings(request=None) - transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSettingsRequest() -@pytest.mark.parametrize("transport_class", [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + assert args[0] == request_msg -def test_transport_kind_grpc(): - transport = ConfigServiceV2Client.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_copy_log_entries_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - assert transport.kind == "grpc" + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.copy_log_entries(request=None) -def test_transport_kind_grpc_asyncio(): - transport = ConfigServiceV2AsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CopyLogEntriesRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): @@ -13071,17 +13320,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - def test_cancel_operation(transport: str = "grpc"): client = ConfigServiceV2Client( @@ -13470,20 +13708,28 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() def test_client_ctx(): transports = [ diff --git a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 3c65de74b6..487dd0c7cd 100755 --- a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -70,6 +70,13 @@ } CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -186,6 +193,7 @@ def test__get_universe_domain(): LoggingServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
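Aside: the per-RPC `*_empty_call` tests removed below are consolidated elsewhere in this diff into `*_empty_call_grpc` / `*_empty_call_grpc_asyncio` variants that all share one shape. A condensed sketch of that shared pattern follows — the helper name `assert_empty_call` is hypothetical (the goldens inline this body per RPC rather than factoring it out):

from unittest import mock

def assert_empty_call(client, rpc_name, request_cls, fake_response):
    # Patch the transport stub, invoke the RPC with request=None, and
    # verify the stub received the default (empty) request message.
    with mock.patch.object(
            type(getattr(client.transport, rpc_name)),
            '__call__') as call:
        call.return_value = fake_response
        getattr(client, rpc_name)(request=None)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == request_cls()

# e.g. assert_empty_call(client, "delete_log", logging.DeleteLogRequest, None)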
+ @pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ (401, CRED_INFO_JSON, True), (403, CRED_INFO_JSON, True), @@ -840,25 +848,6 @@ def test_delete_log(request_type, transport: str = 'grpc'): assert response is None -def test_delete_log_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_log() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() - - def test_delete_log_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -918,27 +907,6 @@ def test_delete_log_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_log_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_log() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() - - @pytest.mark.asyncio async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1006,7 +974,6 @@ async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=lo async def test_delete_log_async_from_dict(): await test_delete_log_async(request_type=dict) - def test_delete_log_field_headers(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1185,25 +1152,6 @@ def test_write_log_entries(request_type, transport: str = 'grpc'): assert isinstance(response, logging.WriteLogEntriesResponse) -def test_write_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.write_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() - - def test_write_log_entries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1263,28 +1211,6 @@ def test_write_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_write_log_entries_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( - )) - await client.write_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() - - @pytest.mark.asyncio async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1501,25 +1427,6 @@ def test_list_log_entries(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() - - def test_list_log_entries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1583,29 +1490,6 @@ def test_list_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_log_entries_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', - )) - await client.list_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() - - @pytest.mark.asyncio async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2005,25 +1889,6 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = 'grp assert response.next_page_token == 'next_page_token_value' -def test_list_monitored_resource_descriptors_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_monitored_resource_descriptors() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() - - def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2083,29 +1948,6 @@ def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', - )) - await client.list_monitored_resource_descriptors() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() - - @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2405,25 +2247,6 @@ def test_list_logs(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_logs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_logs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() - - def test_list_logs_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2485,30 +2308,6 @@ def test_list_logs_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_logs_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', - )) - await client.list_logs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() - - @pytest.mark.asyncio async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2581,7 +2380,6 @@ async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=log async def test_list_logs_async_from_dict(): await test_list_logs_async(request_type=dict) - def test_list_logs_field_headers(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2988,7 +2786,6 @@ def test_tail_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 - @pytest.mark.asyncio async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3152,6 +2949,129 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" +def test_initialize_client_w_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_log_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + call.return_value = None + client.delete_log(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.DeleteLogRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_write_log_entries_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + call.return_value = logging.WriteLogEntriesResponse() + client.write_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.WriteLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_log_entries_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + call.return_value = logging.ListLogEntriesResponse() + client.list_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_monitored_resource_descriptors_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + call.return_value = logging.ListMonitoredResourceDescriptorsResponse() + client.list_monitored_resource_descriptors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListMonitoredResourceDescriptorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_logs_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + call.return_value = logging.ListLogsResponse() + client.list_logs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogsRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = LoggingServiceV2AsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -3159,6 +3079,147 @@ def test_transport_kind_grpc_asyncio(): assert transport.kind == "grpc_asyncio" +def test_initialize_client_w_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_log_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.DeleteLogRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_write_log_entries_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( + )) + await client.write_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.WriteLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_log_entries_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( + next_page_token='next_page_token_value', + )) + await client.list_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( + next_page_token='next_page_token_value', + )) + await client.list_monitored_resource_descriptors(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListMonitoredResourceDescriptorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_logs_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( + log_names=['log_names_value'], + next_page_token='next_page_token_value', + )) + await client.list_logs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogsRequest() + + assert args[0] == request_msg + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = LoggingServiceV2Client( @@ -3653,17 +3714,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - def test_cancel_operation(transport: str = "grpc"): client = LoggingServiceV2Client( @@ -4052,20 +4102,28 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() def test_client_ctx(): transports = [ diff --git a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 345ffac34d..7635fc1c78 100755 --- a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -68,6 +68,13 @@ } CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + 
chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -184,6 +191,7 @@ def test__get_universe_domain(): MetricsServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." + @pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ (401, CRED_INFO_JSON, True), (403, CRED_INFO_JSON, True), @@ -841,25 +849,6 @@ def test_list_log_metrics(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_log_metrics_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_log_metrics() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() - - def test_list_log_metrics_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -921,29 +910,6 @@ def test_list_log_metrics_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_log_metrics_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', - )) - await client.list_log_metrics() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() - - @pytest.mark.asyncio async def test_list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1014,7 +980,6 @@ async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_t async def test_list_log_metrics_async_from_dict(): await test_list_log_metrics_async(request_type=dict) - def test_list_log_metrics_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1403,25 +1368,6 @@ def test_get_log_metric(request_type, transport: str = 'grpc'): assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_get_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() - - def test_get_log_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1481,35 +1427,6 @@ def test_get_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_log_metric_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) - await client.get_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() - - @pytest.mark.asyncio async def test_get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1592,7 +1509,6 @@ async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_typ async def test_get_log_metric_async_from_dict(): await test_get_log_metric_async(request_type=dict) - def test_get_log_metric_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1785,25 +1701,6 @@ def test_create_log_metric(request_type, transport: str = 'grpc'): assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_create_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() - - def test_create_log_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
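Aside: the `mock_async_gen` helper added near the top of each test module in this diff yields successive slices of `data` encoded as UTF-8 bytes. A minimal consumption sketch, under the assumption that tests drive it as a plain async iterator (the helper body below is copied from the diff, minus the coverage pragma):

import asyncio

async def mock_async_gen(data, chunk_size=1):
    # Yields data[i : i + chunk_size] for each index i, encoded as bytes.
    for i in range(0, len(data)):
        chunk = data[i : i + chunk_size]
        yield chunk.encode("utf-8")

async def _demo():
    chunks = [chunk async for chunk in mock_async_gen("abc")]
    assert chunks == [b"a", b"b", b"c"]

asyncio.run(_demo())

Note that because the loop advances one index at a time, passing chunk_size > 1 produces overlapping slices (e.g. "abc" with chunk_size=2 yields b"ab", b"bc", b"c").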
@@ -1863,35 +1760,6 @@ def test_create_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_log_metric_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) - await client.create_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() - - @pytest.mark.asyncio async def test_create_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1974,7 +1842,6 @@ async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_ async def test_create_log_metric_async_from_dict(): await test_create_log_metric_async(request_type=dict) - def test_create_log_metric_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2177,25 +2044,6 @@ def test_update_log_metric(request_type, transport: str = 'grpc'): assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_update_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() - - def test_update_log_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2255,35 +2103,6 @@ def test_update_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_log_metric_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) - await client.update_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() - - @pytest.mark.asyncio async def test_update_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2366,7 +2185,6 @@ async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_ async def test_update_log_metric_async_from_dict(): await test_update_log_metric_async(request_type=dict) - def test_update_log_metric_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2554,25 +2372,6 @@ def test_delete_log_metric(request_type, transport: str = 'grpc'): assert response is None -def test_delete_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() - - def test_delete_log_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2632,27 +2431,6 @@ def test_delete_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_log_metric_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() - - @pytest.mark.asyncio async def test_delete_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2720,7 +2498,6 @@ async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_ async def test_delete_log_metric_async_from_dict(): await test_delete_log_metric_async(request_type=dict) - def test_delete_log_metric_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2959,6 +2736,129 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" +def test_initialize_client_w_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_log_metrics_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + call.return_value = logging_metrics.ListLogMetricsResponse() + client.list_log_metrics(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.ListLogMetricsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client.get_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.GetLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client.create_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.CreateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client.update_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.UpdateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + call.return_value = None + client.delete_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.DeleteLogMetricRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = MetricsServiceV2AsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -2966,6 +2866,165 @@ def test_transport_kind_grpc_asyncio(): assert transport.kind == "grpc_asyncio" +def test_initialize_client_w_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_log_metrics_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( + next_page_token='next_page_token_value', + )) + await client.list_log_metrics(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.ListLogMetricsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + await client.get_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.GetLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + await client.create_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.CreateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + await client.update_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.UpdateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log_metric(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.DeleteLogMetricRequest() + + assert args[0] == request_msg + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = MetricsServiceV2Client( @@ -3459,17 +3518,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - def test_cancel_operation(transport: str = "grpc"): client = MetricsServiceV2Client( @@ -3858,20 +3906,28 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() def test_client_ctx(): transports = [ diff --git a/tests/integration/goldens/redis/docs/index.rst b/tests/integration/goldens/redis/docs/index.rst index f7ccd42cd0..0b346d85a9 100755 --- a/tests/integration/goldens/redis/docs/index.rst +++ b/tests/integration/goldens/redis/docs/index.rst @@ -3,5 +3,5 @@ API Reference .. toctree:: :maxdepth: 2 - redis_v1/services - redis_v1/types + redis_v1/services_ + redis_v1/types_ diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 6bf9755041..2ba2cb14de 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -45,6 +46,13 @@ from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .client import CloudRedisClient +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class CloudRedisAsyncClient: """Configures and manages Cloud Memorystore for Redis instances @@ -255,13 +263,27 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.redis_v1.CloudRedisAsyncClient`.", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "credentialsType": None, + } + ) + async def list_instances(self, request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists all Redis instances owned by a project in either the specified location (region) or all locations. @@ -317,8 +339,10 @@ async def sample_list_instances(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesAsyncPager: @@ -390,7 +414,7 @@ async def get_instance(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. @@ -435,8 +459,10 @@ async def sample_get_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.redis_v1.types.Instance: @@ -492,7 +518,7 @@ async def get_instance_auth_string(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> cloud_redis.InstanceAuthString: r"""Gets the AUTH string for a Redis instance. If AUTH is not enabled for the instance the response will be empty. @@ -540,8 +566,10 @@ async def sample_get_instance_auth_string(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.redis_v1.types.InstanceAuthString: @@ -599,7 +627,7 @@ async def create_instance(self, instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Creates a Redis instance based on the specified tier and memory size. @@ -688,8 +716,10 @@ async def sample_create_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -763,7 +793,7 @@ async def update_instance(self, instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Updates the metadata and configuration of a specific Redis instance. @@ -836,8 +866,10 @@ async def sample_update_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation_async.AsyncOperation: @@ -909,7 +941,7 @@ async def upgrade_instance(self, redis_version: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Upgrades Redis instance to the newer Redis version specified in the request. @@ -967,8 +999,10 @@ async def sample_upgrade_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1040,7 +1074,7 @@ async def import_instance(self, input_config: Optional[cloud_redis.InputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Import a Redis RDB snapshot file from Cloud Storage into a Redis instance. @@ -1108,8 +1142,10 @@ async def sample_import_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1181,7 +1217,7 @@ async def export_instance(self, output_config: Optional[cloud_redis.OutputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Export Redis instance data into a Redis RDB format file in Cloud Storage. @@ -1246,8 +1282,10 @@ async def sample_export_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
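Each mutating RPC in this client (`create_instance`, `update_instance`, and the methods that follow) returns a `google.api_core.operation_async.AsyncOperation`. A hedged sketch of the usual await-then-poll flow, with placeholder names and sizes:

    # Sketch only: resource names and field values are illustrative.
    from google.cloud import redis_v1

    async def create_and_wait(client: redis_v1.CloudRedisAsyncClient) -> redis_v1.Instance:
        operation = await client.create_instance(
            parent="projects/my-project/locations/us-central1",
            instance_id="my-instance",
            instance=redis_v1.Instance(
                tier=redis_v1.Instance.Tier.BASIC,
                memory_size_gb=1,
            ),
        )
        # AsyncOperation.result() is itself awaitable; it resolves to the Instance.
        return await operation.result()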
Returns: google.api_core.operation_async.AsyncOperation: @@ -1319,7 +1357,7 @@ async def failover_instance(self, data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud @@ -1378,8 +1416,10 @@ async def sample_failover_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1450,7 +1490,7 @@ async def delete_instance(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. @@ -1500,8 +1540,10 @@ async def sample_delete_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1579,7 +1621,7 @@ async def reschedule_maintenance(self, schedule_time: Optional[timestamp_pb2.Timestamp] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Reschedule maintenance for a given instance in a given project and location. @@ -1645,8 +1687,10 @@ async def sample_reschedule_maintenance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation_async.AsyncOperation: @@ -1719,7 +1763,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1730,8 +1774,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1744,11 +1790,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1773,7 +1815,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1784,8 +1826,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1798,11 +1842,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1827,7 +1867,7 @@ async def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1843,8 +1883,10 @@ async def delete_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1856,11 +1898,7 @@ async def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1881,7 +1919,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1896,8 +1934,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1909,11 +1949,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1928,13 +1964,71 @@ async def cancel_operation( # Send the request. await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + async def wait_operation( + self, + request: Optional[operations_pb2.WaitOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.wait_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + async def get_location( self, request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -1945,8 +2039,10 @@ async def get_location( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.Location: Location object. @@ -1959,11 +2055,7 @@ async def get_location( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. @@ -1988,7 +2080,7 @@ async def list_locations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -1999,8 +2091,10 @@ async def list_locations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
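The `wait_operation` mixin added above takes a raw `operations_pb2.WaitOperationRequest` rather than a proto-plus type, so the request is constructed directly (or passed as a dict for keyword expansion). A sketch with a placeholder operation name:

    # Sketch only: the operation name is a placeholder.
    from google.longrunning import operations_pb2
    from google.protobuf import duration_pb2

    async def wait_for(client, op_name: str) -> operations_pb2.Operation:
        return await client.wait_operation(
            operations_pb2.WaitOperationRequest(
                name=op_name,
                timeout=duration_pb2.Duration(seconds=30),
            )
        )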
Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. @@ -2013,11 +2107,7 @@ async def list_locations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 5bdfe97873..9281b5990c 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -16,6 +16,7 @@ from collections import OrderedDict from http import HTTPStatus import json +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast @@ -38,6 +39,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -51,7 +60,12 @@ from .transports.grpc import CloudRedisGrpcTransport from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .transports.rest import CloudRedisRestTransport -from .transports.rest_asyncio import AsyncCloudRedisRestTransport +try: + from .transports.rest_asyncio import AsyncCloudRedisRestTransport + HAS_ASYNC_REST_DEPENDENCIES = True +except ImportError as e: # pragma: NO COVER + HAS_ASYNC_REST_DEPENDENCIES = False + ASYNC_REST_EXCEPTION = e class CloudRedisClientMeta(type): @@ -65,7 +79,8 @@ class CloudRedisClientMeta(type): _transport_registry["grpc"] = CloudRedisGrpcTransport _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport _transport_registry["rest"] = CloudRedisRestTransport - _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport + if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport def get_transport_class(cls, label: Optional[str] = None, @@ -80,6 +95,8 @@ def get_transport_class(cls, The transport class to use. """ # If a specific transport is requested, return that one. + if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + raise ASYNC_REST_EXCEPTION if label: return cls._transport_registry[label] @@ -418,33 +435,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CloudRedisClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -454,9 +444,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - CloudRedisClient._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True def _add_cred_info_for_auth_errors( self, @@ -571,6 +561,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -600,16 +594,38 @@ def __init__(self, *, self._use_mtls_endpoint)) if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - transport_init: Union[Type[CloudRedisTransport], Callable[..., CloudRedisTransport]] = ( CloudRedisClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudRedisTransport], transport) ) + + if "rest_asyncio" in str(transport_init): + unsupported_params = { + "google.api_core.client_options.ClientOptions.credentials_file": self._client_options.credentials_file, + "google.api_core.client_options.ClientOptions.scopes": self._client_options.scopes, + "google.api_core.client_options.ClientOptions.quota_project_id": self._client_options.quota_project_id, + "google.api_core.client_options.ClientOptions.client_cert_source": self._client_options.client_cert_source, + "google.api_core.client_options.ClientOptions.api_audience": self._client_options.api_audience, + + } + provided_unsupported_params = [name for name, value in unsupported_params.items() if value is not None] + if provided_unsupported_params: + raise core_exceptions.AsyncRestUnsupportedParameterError( # type: ignore + f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}" + ) + self._transport = transport_init( + credentials=credentials, + host=self._api_endpoint, + client_info=client_info, + ) + return + + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + 
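The constructor guard above rejects client options that the async REST transport cannot honor, while `get_transport_class` re-raises the stored ImportError when the optional dependencies are missing. A hedged sketch of how the unsupported-parameter path surfaces to callers (the quota project is a placeholder):

    # Sketch only: demonstrates the guard, not a recommended pattern.
    from google.api_core import exceptions as core_exceptions
    from google.api_core.client_options import ClientOptions
    from google.cloud import redis_v1

    try:
        client = redis_v1.CloudRedisAsyncClient(
            transport="rest_asyncio",
            client_options=ClientOptions(quota_project_id="my-quota-project"),
        )
    except core_exceptions.AsyncRestUnsupportedParameterError as e:
        # quota_project_id is among the options rejected for rest_asyncio.
        print(e)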
# initialize with the provided callable or the passed in class self._transport = transport_init( credentials=credentials, @@ -623,13 +639,28 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.redis_v1.CloudRedisClient`.", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "credentialsType": None, + } + ) + def list_instances(self, request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListInstancesPager: r"""Lists all Redis instances owned by a project in either the specified location (region) or all locations. @@ -685,8 +716,10 @@ def sample_list_instances(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesPager: @@ -718,7 +751,7 @@ def sample_list_instances(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_instances] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -757,7 +790,7 @@ def get_instance(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. @@ -802,8 +835,10 @@ def sample_get_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.redis_v1.types.Instance: @@ -830,7 +865,7 @@ def sample_get_instance(): # and friendly error handling. 
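The `Created client ...` record above, like the per-RPC records added in the transports later in this diff, is emitted only when `google.api_core.client_logging` is importable and DEBUG is enabled on the relevant logger. A sketch of turning the output on; the environment variable read by `client_logging.initialize_logging()` is stated from memory, so treat its name as an assumption:

    # Sketch only: enable the structured DEBUG records via std logging...
    import logging

    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger("google.cloud.redis_v1").setLevel(logging.DEBUG)

    # ...or scope them via the environment before the client is imported
    # (assumed variable name):
    #   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.redis_v1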
rpc = self._transport._wrapped_methods[self._transport.get_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -858,7 +893,7 @@ def get_instance_auth_string(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> cloud_redis.InstanceAuthString: r"""Gets the AUTH string for a Redis instance. If AUTH is not enabled for the instance the response will be empty. @@ -906,8 +941,10 @@ def sample_get_instance_auth_string(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.redis_v1.types.InstanceAuthString: @@ -934,7 +971,7 @@ def sample_get_instance_auth_string(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_instance_auth_string] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -964,7 +1001,7 @@ def create_instance(self, instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Creates a Redis instance based on the specified tier and memory size. @@ -1053,8 +1090,10 @@ def sample_create_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1090,7 +1129,7 @@ def sample_create_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1127,7 +1166,7 @@ def update_instance(self, instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Updates the metadata and configuration of a specific Redis instance. @@ -1200,8 +1239,10 @@ def sample_update_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1235,7 +1276,7 @@ def sample_update_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1272,7 +1313,7 @@ def upgrade_instance(self, redis_version: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Upgrades Redis instance to the newer Redis version specified in the request. @@ -1330,8 +1371,10 @@ def sample_upgrade_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1365,7 +1408,7 @@ def sample_upgrade_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.upgrade_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1402,7 +1445,7 @@ def import_instance(self, input_config: Optional[cloud_redis.InputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Import a Redis RDB snapshot file from Cloud Storage into a Redis instance. @@ -1470,8 +1513,10 @@ def sample_import_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1505,7 +1550,7 @@ def sample_import_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.import_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1542,7 +1587,7 @@ def export_instance(self, output_config: Optional[cloud_redis.OutputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Export Redis instance data into a Redis RDB format file in Cloud Storage. @@ -1607,8 +1652,10 @@ def sample_export_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1642,7 +1689,7 @@ def sample_export_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.export_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1679,7 +1726,7 @@ def failover_instance(self, data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud @@ -1738,8 +1785,10 @@ def sample_failover_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1773,7 +1822,7 @@ def sample_failover_instance(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.failover_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1809,7 +1858,7 @@ def delete_instance(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. @@ -1859,8 +1908,10 @@ def sample_delete_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1899,7 +1950,7 @@ def sample_delete_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1937,7 +1988,7 @@ def reschedule_maintenance(self, schedule_time: Optional[timestamp_pb2.Timestamp] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Reschedule maintenance for a given instance in a given project and location. @@ -2003,8 +2054,10 @@ def sample_reschedule_maintenance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2040,7 +2093,7 @@ def sample_reschedule_maintenance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.reschedule_maintenance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2089,7 +2142,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. 
@@ -2100,8 +2153,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -2114,11 +2169,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -2147,7 +2198,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -2158,8 +2209,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -2172,11 +2225,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2205,7 +2254,7 @@ def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -2221,8 +2270,10 @@ def delete_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2234,11 +2285,7 @@ def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2259,7 +2306,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -2274,8 +2321,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2287,11 +2336,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2306,13 +2351,71 @@ def cancel_operation( # Send the request. rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + def wait_operation( + self, + request: Optional[operations_pb2.WaitOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.wait_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + def get_location( self, request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -2323,8 +2426,10 @@ def get_location( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.Location: Location object. @@ -2337,11 +2442,7 @@ def get_location( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. @@ -2370,7 +2471,7 @@ def list_locations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -2381,8 +2482,10 @@ def list_locations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. @@ -2395,11 +2498,7 @@ def list_locations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py index 55a7a66f95..e3d2c05b83 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -51,7 +51,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -64,8 +64,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = cloud_redis.ListInstancesRequest(request) @@ -117,7 +119,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -130,8 +132,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = cloud_redis.ListInstancesRequest(request) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index 889648d305..563cd5dd76 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -14,13 +14,22 @@ # limitations under the License. # from collections import OrderedDict -from typing import Dict, Type +from typing import Dict, Type, Tuple from .base import CloudRedisTransport from .grpc import CloudRedisGrpcTransport from .grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .rest import CloudRedisRestTransport from .rest import CloudRedisRestInterceptor +ASYNC_REST_CLASSES: Tuple[str, ...] +try: + from .rest_asyncio import AsyncCloudRedisRestTransport + from .rest_asyncio import AsyncCloudRedisRestInterceptor + ASYNC_REST_CLASSES = ('AsyncCloudRedisRestTransport', 'AsyncCloudRedisRestInterceptor') + HAS_REST_ASYNC = True +except ImportError: # pragma: NO COVER + ASYNC_REST_CLASSES = () + HAS_REST_ASYNC = False # Compile a registry of transports. 
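Because the async REST names are appended to `__all__` only when their import succeeds, downstream code can feature-detect the transport without triggering the ImportError itself. A small sketch:

    # Sketch only: the attribute exists iff the optional deps imported cleanly.
    from google.cloud.redis_v1.services.cloud_redis import transports

    has_async_rest = hasattr(transports, "AsyncCloudRedisRestTransport")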
@@ -28,6 +37,8 @@ _transport_registry['grpc'] = CloudRedisGrpcTransport _transport_registry['grpc_asyncio'] = CloudRedisGrpcAsyncIOTransport _transport_registry['rest'] = CloudRedisRestTransport +if HAS_REST_ASYNC: # pragma: NO COVER + _transport_registry['rest_asyncio'] = AsyncCloudRedisRestTransport __all__ = ( 'CloudRedisTransport', @@ -35,4 +46,4 @@ 'CloudRedisGrpcAsyncIOTransport', 'CloudRedisRestTransport', 'CloudRedisRestInterceptor', -) +) + ASYNC_REST_CLASSES diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index b8a9fa84b5..57832b4d91 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -177,6 +177,41 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + self.wait_operation: gapic_v1.method.wrap_method( + self.wait_operation, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -328,6 +363,15 @@ def delete_operation( ]: raise NotImplementedError() + @property + def wait_operation( + self, + ) -> Callable[ + [operations_pb2.WaitOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_location(self, ) -> Callable[ diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 87b5e7e0c0..76c07f74bb 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
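The `_prep_wrapped_messages` entries added above are what the earlier client changes index into: each per-call `gapic_v1.method.wrap_method(...)` was replaced by a lookup in this table, so retry/timeout wrapping happens once per transport instance instead of once per invocation. A minimal self-contained sketch of the pattern, with a stand-in RPC:

    # Sketch only: wrap once at setup, look up per call; names are illustrative.
    from google.api_core import gapic_v1

    class _Transport:
        def get_operation(self, request, **kwargs):  # stand-in for the real stub
            return request

        def _prep_wrapped_messages(self, client_info):
            self._wrapped_methods = {
                self.get_operation: gapic_v1.method.wrap_method(
                    self.get_operation,
                    default_timeout=None,
                    client_info=client_info,
                ),
            }

    transport = _Transport()
    transport._prep_wrapped_messages(gapic_v1.client_info.ClientInfo())
    # Call sites fetch the pre-wrapped callable instead of re-wrapping:
    rpc = transport._wrapped_methods[transport.get_operation]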
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,14 +25,85 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class CloudRedisGrpcTransport(CloudRedisTransport): """gRPC backend transport for CloudRedis. @@ -201,7 +275,10 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages.
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -265,7 +342,7 @@ def operations_client(self) -> operations_v1.OperationsClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -299,7 +376,7 @@ def list_instances(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self.grpc_channel.unary_unary( + self._stubs['list_instances'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/ListInstances', request_serializer=cloud_redis.ListInstancesRequest.serialize, response_deserializer=cloud_redis.ListInstancesResponse.deserialize, @@ -325,7 +402,7 @@ def get_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self.grpc_channel.unary_unary( + self._stubs['get_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/GetInstance', request_serializer=cloud_redis.GetInstanceRequest.serialize, response_deserializer=cloud_redis.Instance.deserialize, @@ -354,7 +431,7 @@ def get_instance_auth_string(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_instance_auth_string' not in self._stubs: - self._stubs['get_instance_auth_string'] = self.grpc_channel.unary_unary( + self._stubs['get_instance_auth_string'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString', request_serializer=cloud_redis.GetInstanceAuthStringRequest.serialize, response_deserializer=cloud_redis.InstanceAuthString.deserialize, @@ -393,7 +470,7 @@ def create_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self.grpc_channel.unary_unary( + self._stubs['create_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/CreateInstance', request_serializer=cloud_redis.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -424,7 +501,7 @@ def update_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self.grpc_channel.unary_unary( + self._stubs['update_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/UpdateInstance', request_serializer=cloud_redis.UpdateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -451,7 +528,7 @@ def upgrade_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'upgrade_instance' not in self._stubs: - self._stubs['upgrade_instance'] = self.grpc_channel.unary_unary( + self._stubs['upgrade_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/UpgradeInstance', request_serializer=cloud_redis.UpgradeInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -485,7 +562,7 @@ def import_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'import_instance' not in self._stubs: - self._stubs['import_instance'] = self.grpc_channel.unary_unary( + self._stubs['import_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/ImportInstance', request_serializer=cloud_redis.ImportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -516,7 +593,7 @@ def export_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'export_instance' not in self._stubs: - self._stubs['export_instance'] = self.grpc_channel.unary_unary( + self._stubs['export_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/ExportInstance', request_serializer=cloud_redis.ExportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -544,7 +621,7 @@ def failover_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'failover_instance' not in self._stubs: - self._stubs['failover_instance'] = self.grpc_channel.unary_unary( + self._stubs['failover_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/FailoverInstance', request_serializer=cloud_redis.FailoverInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -571,7 +648,7 @@ def delete_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self.grpc_channel.unary_unary( + self._stubs['delete_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/DeleteInstance', request_serializer=cloud_redis.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -598,7 +675,7 @@ def reschedule_maintenance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'reschedule_maintenance' not in self._stubs: - self._stubs['reschedule_maintenance'] = self.grpc_channel.unary_unary( + self._stubs['reschedule_maintenance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance', request_serializer=cloud_redis.RescheduleMaintenanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -606,7 +683,7 @@ def reschedule_maintenance(self) -> Callable[ return self._stubs['reschedule_maintenance'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def delete_operation( @@ -619,7 +696,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -637,13 +714,31 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, ) return self._stubs["cancel_operation"] + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "wait_operation" not in self._stubs: + self._stubs["wait_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/WaitOperation", + request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["wait_operation"] + @property def get_operation( self, @@ -655,7 +750,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -673,7 +768,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -691,7 +786,7 @@ def list_locations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -709,7 +804,7 @@ def get_location( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 364bd7e950..aa699fc707 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. # import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -24,8 +27,11 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -34,6 +40,73 @@ from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO from .grpc import CloudRedisGrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = 
f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport): """gRPC AsyncIO backend transport for CloudRedis. @@ -247,8 +320,11 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -271,7 +347,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -305,7 +381,7 @@ def list_instances(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self.grpc_channel.unary_unary( + self._stubs['list_instances'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/ListInstances', request_serializer=cloud_redis.ListInstancesRequest.serialize, response_deserializer=cloud_redis.ListInstancesResponse.deserialize, @@ -331,7 +407,7 @@ def get_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self.grpc_channel.unary_unary( + self._stubs['get_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/GetInstance', request_serializer=cloud_redis.GetInstanceRequest.serialize, response_deserializer=cloud_redis.Instance.deserialize, @@ -360,7 +436,7 @@ def get_instance_auth_string(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_instance_auth_string' not in self._stubs: - self._stubs['get_instance_auth_string'] = self.grpc_channel.unary_unary( + self._stubs['get_instance_auth_string'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString', request_serializer=cloud_redis.GetInstanceAuthStringRequest.serialize, response_deserializer=cloud_redis.InstanceAuthString.deserialize, @@ -399,7 +475,7 @@ def create_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self.grpc_channel.unary_unary( + self._stubs['create_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/CreateInstance', request_serializer=cloud_redis.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -430,7 +506,7 @@ def update_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self.grpc_channel.unary_unary( + self._stubs['update_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/UpdateInstance', request_serializer=cloud_redis.UpdateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -457,7 +533,7 @@ def upgrade_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'upgrade_instance' not in self._stubs: - self._stubs['upgrade_instance'] = self.grpc_channel.unary_unary( + self._stubs['upgrade_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/UpgradeInstance', request_serializer=cloud_redis.UpgradeInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -491,7 +567,7 @@ def import_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'import_instance' not in self._stubs: - self._stubs['import_instance'] = self.grpc_channel.unary_unary( + self._stubs['import_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/ImportInstance', request_serializer=cloud_redis.ImportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -522,7 +598,7 @@ def export_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'export_instance' not in self._stubs: - self._stubs['export_instance'] = self.grpc_channel.unary_unary( + self._stubs['export_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/ExportInstance', request_serializer=cloud_redis.ExportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -550,7 +626,7 @@ def failover_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'failover_instance' not in self._stubs: - self._stubs['failover_instance'] = self.grpc_channel.unary_unary( + self._stubs['failover_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/FailoverInstance', request_serializer=cloud_redis.FailoverInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -577,7 +653,7 @@ def delete_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self.grpc_channel.unary_unary( + self._stubs['delete_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/DeleteInstance', request_serializer=cloud_redis.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -604,7 +680,7 @@ def reschedule_maintenance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'reschedule_maintenance' not in self._stubs: - self._stubs['reschedule_maintenance'] = self.grpc_channel.unary_unary( + self._stubs['reschedule_maintenance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance', request_serializer=cloud_redis.RescheduleMaintenanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -669,6 +745,41 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + self.wait_operation: self._wrap_method( + self.wait_operation, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): @@ -677,7 +788,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -694,7 +805,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -712,13 +823,31 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, ) return self._stubs["cancel_operation"] + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "wait_operation" not in self._stubs: + self._stubs["wait_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/WaitOperation", + request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["wait_operation"] + @property def get_operation( self, @@ -730,7 +859,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -748,7 +877,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -766,7 +895,7 @@ def list_locations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -784,7 +913,7 @@ def get_location( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 5d99f7e2c3..702c01fb2b 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -45,11 +46,18 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -161,7 +169,7 @@ def post_upgrade_instance(self, response): """ - def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -172,12 +180,32 @@ def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metada def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. """ return response - def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + + def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. 
+ """ + return response, metadata + + def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -188,12 +216,32 @@ def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metada def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. """ return response - def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, str]]]: + + def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + + def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for export_instance Override in a subclass to manipulate the request or metadata @@ -204,12 +252,32 @@ def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metada def post_export_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for export_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_export_instance` interceptor runs + before the `post_export_instance_with_metadata` interceptor. 
""" return response - def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, str]]]: + + def post_export_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_export_instance_with_metadata` + interceptor in new development instead of the `post_export_instance` interceptor. + When both interceptors are used, this `post_export_instance_with_metadata` interceptor runs after the + `post_export_instance` interceptor. The (possibly modified) response returned by + `post_export_instance` will be passed to + `post_export_instance_with_metadata`. + """ + return response, metadata + + def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for failover_instance Override in a subclass to manipulate the request or metadata @@ -220,12 +288,32 @@ def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, me def post_failover_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for failover_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_failover_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_failover_instance` interceptor runs + before the `post_failover_instance_with_metadata` interceptor. """ return response - def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, str]]]: + + def post_failover_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for failover_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_failover_instance_with_metadata` + interceptor in new development instead of the `post_failover_instance` interceptor. + When both interceptors are used, this `post_failover_instance_with_metadata` interceptor runs after the + `post_failover_instance` interceptor. The (possibly modified) response returned by + `post_failover_instance` will be passed to + `post_failover_instance_with_metadata`. 
+ """ + return response, metadata + + def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -236,12 +324,32 @@ def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Se def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. """ return response - def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, str]]]: + + def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + + def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance_auth_string Override in a subclass to manipulate the request or metadata @@ -252,12 +360,32 @@ def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStrin def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString) -> cloud_redis.InstanceAuthString: """Post-rpc interceptor for get_instance_auth_string - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_auth_string_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_get_instance_auth_string` interceptor runs + before the `post_get_instance_auth_string_with_metadata` interceptor. 
""" return response - def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, str]]]: + + def post_get_instance_auth_string_with_metadata(self, response: cloud_redis.InstanceAuthString, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.InstanceAuthString, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance_auth_string + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_get_instance_auth_string_with_metadata` + interceptor in new development instead of the `post_get_instance_auth_string` interceptor. + When both interceptors are used, this `post_get_instance_auth_string_with_metadata` interceptor runs after the + `post_get_instance_auth_string` interceptor. The (possibly modified) response returned by + `post_get_instance_auth_string` will be passed to + `post_get_instance_auth_string_with_metadata`. + """ + return response, metadata + + def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for import_instance Override in a subclass to manipulate the request or metadata @@ -268,12 +396,32 @@ def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metada def post_import_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for import_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_import_instance` interceptor runs + before the `post_import_instance_with_metadata` interceptor. """ return response - def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, str]]]: + + def post_import_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_import_instance_with_metadata` + interceptor in new development instead of the `post_import_instance` interceptor. + When both interceptors are used, this `post_import_instance_with_metadata` interceptor runs after the + `post_import_instance` interceptor. The (possibly modified) response returned by + `post_import_instance` will be passed to + `post_import_instance_with_metadata`. 
+ """ + return response, metadata + + def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the request or metadata @@ -284,12 +432,32 @@ def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response - def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, str]]]: + + def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + + def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for reschedule_maintenance Override in a subclass to manipulate the request or metadata @@ -300,12 +468,32 @@ def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceR def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for reschedule_maintenance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reschedule_maintenance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_reschedule_maintenance` interceptor runs + before the `post_reschedule_maintenance_with_metadata` interceptor. 
""" return response - def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + + def post_reschedule_maintenance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reschedule_maintenance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_reschedule_maintenance_with_metadata` + interceptor in new development instead of the `post_reschedule_maintenance` interceptor. + When both interceptors are used, this `post_reschedule_maintenance_with_metadata` interceptor runs after the + `post_reschedule_maintenance` interceptor. The (possibly modified) response returned by + `post_reschedule_maintenance` will be passed to + `post_reschedule_maintenance_with_metadata`. + """ + return response, metadata + + def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -316,12 +504,32 @@ def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metada def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. """ return response - def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, str]]]: + + def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. 
+ """ + return response, metadata + + def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for upgrade_instance Override in a subclass to manipulate the request or metadata @@ -332,15 +540,34 @@ def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, meta def post_upgrade_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for upgrade_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_upgrade_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_upgrade_instance` interceptor runs + before the `post_upgrade_instance_with_metadata` interceptor. """ return response + def post_upgrade_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for upgrade_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_upgrade_instance_with_metadata` + interceptor in new development instead of the `post_upgrade_instance` interceptor. + When both interceptors are used, this `post_upgrade_instance_with_metadata` interceptor runs after the + `post_upgrade_instance` interceptor. The (possibly modified) response returned by + `post_upgrade_instance` will be passed to + `post_upgrade_instance_with_metadata`. + """ + return response, metadata + def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -358,9 +585,10 @@ def post_get_location( it is returned to user code. """ return response + def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -378,9 +606,10 @@ def post_list_locations( it is returned to user code. 
""" return response + def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -398,9 +627,10 @@ def post_cancel_operation( it is returned to user code. """ return response + def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -418,9 +648,10 @@ def post_delete_operation( it is returned to user code. """ return response + def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -438,9 +669,10 @@ def post_get_operation( it is returned to user code. """ return response + def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -459,6 +691,27 @@ def post_list_operations( """ return response + def pre_wait_operation( + self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for wait_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_wait_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for wait_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + @dataclasses.dataclass class CloudRedisRestStub: @@ -603,6 +856,13 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: 'uri': '/v1/{name=projects/*/locations/*}/operations', }, ], + 'google.longrunning.Operations.WaitOperation': [ + { + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', + 'body': '*', + }, + ], } rest_transport = operations_v1.OperationsRestTransport( @@ -649,7 +909,7 @@ def __call__(self, request: cloud_redis.CreateInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. @@ -660,8 +920,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -672,6 +934,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + request, metadata = self._interceptor.pre_create_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) @@ -680,6 +943,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CreateInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -691,7 +977,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for 
google.cloud.redis_v1.CloudRedisClient.create_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CreateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteInstance(_BaseCloudRedisRestTransport._BaseDeleteInstance, CloudRedisRestStub): @@ -724,7 +1032,7 @@ def __call__(self, request: cloud_redis.DeleteInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. @@ -735,8 +1043,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -747,12 +1057,36 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + request, metadata = self._interceptor.pre_delete_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -764,7 +1098,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.delete_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteInstance", + "metadata": 
http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ExportInstance(_BaseCloudRedisRestTransport._BaseExportInstance, CloudRedisRestStub): @@ -798,7 +1154,7 @@ def __call__(self, request: cloud_redis.ExportInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the export instance method over HTTP. @@ -809,8 +1165,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -821,6 +1179,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseExportInstance._get_http_options() + request, metadata = self._interceptor.pre_export_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseExportInstance._get_transcoded_request(http_options, request) @@ -829,6 +1188,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ExportInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ExportInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._ExportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -840,7 +1222,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.export_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ExportInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class 
_FailoverInstance(_BaseCloudRedisRestTransport._BaseFailoverInstance, CloudRedisRestStub): @@ -874,7 +1278,7 @@ def __call__(self, request: cloud_redis.FailoverInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the failover instance method over HTTP. @@ -885,8 +1289,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -897,6 +1303,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_http_options() + request, metadata = self._interceptor.pre_failover_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_transcoded_request(http_options, request) @@ -905,6 +1312,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.FailoverInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "FailoverInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._FailoverInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -916,7 +1346,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_failover_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_failover_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.failover_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "FailoverInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetInstance(_BaseCloudRedisRestTransport._BaseGetInstance, CloudRedisRestStub): @@ -949,7 +1401,7 @@ def 
__call__(self, request: cloud_redis.GetInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. @@ -960,8 +1412,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.cloud_redis.Instance: @@ -969,12 +1423,36 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + request, metadata = self._interceptor.pre_get_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -988,7 +1466,29 @@ def __call__(self, pb_resp = cloud_redis.Instance.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.Instance.to_json(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.get_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetInstanceAuthString(_BaseCloudRedisRestTransport._BaseGetInstanceAuthString, CloudRedisRestStub): @@ -1021,7 +1521,7 @@ def __call__(self, request: cloud_redis.GetInstanceAuthStringRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), ) -> cloud_redis.InstanceAuthString: r"""Call the get instance auth string method over HTTP. @@ -1032,8 +1532,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.cloud_redis.InstanceAuthString: @@ -1041,12 +1543,36 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_http_options() + request, metadata = self._interceptor.pre_get_instance_auth_string(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstanceAuthString", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstanceAuthString", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._GetInstanceAuthString._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1060,7 +1586,29 @@ def __call__(self, pb_resp = cloud_redis.InstanceAuthString.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance_auth_string(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_auth_string_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.InstanceAuthString.to_json(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.get_instance_auth_string", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstanceAuthString", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ImportInstance(_BaseCloudRedisRestTransport._BaseImportInstance, CloudRedisRestStub): @@ -1094,7 +1642,7 @@ def __call__(self, request: cloud_redis.ImportInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call 
the import instance method over HTTP. @@ -1105,8 +1653,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1117,6 +1667,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseImportInstance._get_http_options() + request, metadata = self._interceptor.pre_import_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseImportInstance._get_transcoded_request(http_options, request) @@ -1125,6 +1676,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ImportInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ImportInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._ImportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1136,7 +1710,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.import_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ImportInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListInstances(_BaseCloudRedisRestTransport._BaseListInstances, CloudRedisRestStub): @@ -1169,7 +1765,7 @@ def __call__(self, request: cloud_redis.ListInstancesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. 
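
Each `_LOGGER.debug(...)` block added in this diff is gated on `CLIENT_LOGGING_SUPPORTED` and on DEBUG being enabled for the module logger. One way to surface these "Sending request"/"Received response" records with the standard library, assuming the logger is named after the package path, is:

import logging

logging.basicConfig(level=logging.DEBUG)
# Scope DEBUG to the generated package rather than the root logger.
logging.getLogger("google.cloud.redis_v1").setLevel(logging.DEBUG)

Depending on the installed google-api-core release, automatic handler setup may also be controlled by the GOOGLE_SDK_PYTHON_LOGGING_SCOPE environment variable; treat that as an assumption to verify against your version.
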
@@ -1180,8 +1776,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.cloud_redis.ListInstancesResponse: @@ -1191,12 +1789,36 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + request, metadata = self._interceptor.pre_list_instances(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListInstances", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1210,7 +1832,29 @@ def __call__(self, pb_resp = cloud_redis.ListInstancesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.ListInstancesResponse.to_json(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.list_instances", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListInstances", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _RescheduleMaintenance(_BaseCloudRedisRestTransport._BaseRescheduleMaintenance, CloudRedisRestStub): @@ -1244,7 +1888,7 @@ def __call__(self, request: cloud_redis.RescheduleMaintenanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the reschedule maintenance method over HTTP. @@ -1255,8 +1899,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1267,6 +1913,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_http_options() + request, metadata = self._interceptor.pre_reschedule_maintenance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_transcoded_request(http_options, request) @@ -1275,6 +1922,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.RescheduleMaintenance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "RescheduleMaintenance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._RescheduleMaintenance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1286,7 +1956,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reschedule_maintenance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reschedule_maintenance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.reschedule_maintenance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "RescheduleMaintenance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateInstance(_BaseCloudRedisRestTransport._BaseUpdateInstance, CloudRedisRestStub): @@ -1320,7 +2012,7 @@ def __call__(self, request: cloud_redis.UpdateInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. @@ -1331,8 +2023,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
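
The widened `metadata` annotation recurring through these hunks means plain keys carry `str` values while keys ending in `-bin` carry `bytes`. A short caller-side sketch (the header names are made up, and the client assumes Application Default Credentials):

from google.cloud import redis_v1

client = redis_v1.CloudRedisClient()
instance = client.get_instance(
    name="projects/my-project/locations/us-central1/instances/my-instance",
    metadata=(
        ("x-example-text", "ascii-value"),         # str for a normal key
        ("x-example-trace-bin", b"\x0a\x0b\x0c"),  # bytes for a "-bin" key
    ),
)
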
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1343,6 +2037,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + request, metadata = self._interceptor.pre_update_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) @@ -1351,6 +2046,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpdateInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1362,7 +2080,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.update_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpdateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpgradeInstance(_BaseCloudRedisRestTransport._BaseUpgradeInstance, CloudRedisRestStub): @@ -1396,7 +2136,7 @@ def __call__(self, request: cloud_redis.UpgradeInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the upgrade instance method over HTTP. @@ -1407,8 +2147,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1419,6 +2161,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_http_options() + request, metadata = self._interceptor.pre_upgrade_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_transcoded_request(http_options, request) @@ -1427,6 +2170,29 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpgradeInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpgradeInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._UpgradeInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1438,7 +2204,29 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_upgrade_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_upgrade_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.upgrade_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpgradeInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1534,6 +2322,9 @@ def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.GetLocation") + @staticmethod def _get_response( host, @@ -1560,7 +2351,7 @@ def __call__(self, request: locations_pb2.GetLocationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> locations_pb2.Location: r"""Call the get location method over HTTP. @@ -1571,20 +2362,46 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: locations_pb2.Location: Response from GetLocation method. """ http_options = _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + request, metadata = self._interceptor.pre_get_location(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1593,9 +2410,29 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = locations_pb2.Location() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1603,6 +2440,9 @@ def list_locations(self): return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.ListLocations") + @staticmethod def _get_response( host, @@ -1629,7 +2469,7 @@ def __call__(self, request: locations_pb2.ListLocationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. 
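
For orientation, the GetLocation/ListLocations stubs reworked here back the location mixins on the generated client. A minimal sketch of the caller side, assuming the request types from the standard google-cloud-location package:

from google.cloud import redis_v1
from google.cloud.location import locations_pb2

client = redis_v1.CloudRedisClient()
location = client.get_location(
    locations_pb2.GetLocationRequest(name="projects/my-project/locations/us-central1"))
for loc in client.list_locations(
        locations_pb2.ListLocationsRequest(name="projects/my-project")).locations:
    print(loc.location_id)
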
@@ -1640,20 +2480,46 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: locations_pb2.ListLocationsResponse: Response from ListLocations method. """ http_options = _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + request, metadata = self._interceptor.pre_list_locations(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1662,9 +2528,29 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1672,6 +2558,9 @@ def cancel_operation(self): return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.CancelOperation") + @staticmethod def _get_response( host, @@ -1698,7 +2587,7 @@ def __call__(self, request: operations_pb2.CancelOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), ) -> None: r"""Call the cancel operation method over HTTP. @@ -1709,17 +2598,43 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1735,6 +2650,9 @@ def delete_operation(self): return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.DeleteOperation") + @staticmethod def _get_response( host, @@ -1761,7 +2679,7 @@ def __call__(self, request: operations_pb2.DeleteOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> None: r"""Call the delete operation method over HTTP. @@ -1772,17 +2690,43 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + request, metadata = self._interceptor.pre_delete_operation(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1798,6 +2742,9 @@ def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.GetOperation") + @staticmethod def _get_response( host, @@ -1824,7 +2771,7 @@ def __call__(self, request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -1835,20 +2782,46 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. 
""" http_options = _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + request, metadata = self._interceptor.pre_get_operation(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1857,9 +2830,29 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1867,6 +2860,9 @@ def list_operations(self): return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.ListOperations") + @staticmethod def _get_response( host, @@ -1893,7 +2889,7 @@ def __call__(self, request: operations_pb2.ListOperationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -1904,20 +2900,46 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. """ http_options = _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + request, metadata = self._interceptor.pre_list_operations(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = CloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1926,9 +2948,150 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def wait_operation(self): + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + + class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.WaitOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: operations_pb2.WaitOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the wait operation method over HTTP. 
+ + Args: + request (operations_pb2.WaitOperationRequest): + The request object for WaitOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from WaitOperation method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + + request, metadata = self._interceptor.pre_wait_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "WaitOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
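+            # Note: this synchronous path maps the error with
+            # core_exceptions.from_http_response(response), which reads the body from
+            # the requests.Response itself; the asyncio transport later in this diff
+            # must read the body explicitly before formatting the error.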
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_wait_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "WaitOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 5127656c34..24f3370c48 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -13,20 +13,725 @@ # See the License for the specific language governing permissions and # limitations under the License. # + +import google.auth +try: + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore + from google.api_core import rest_streaming_async # type: ignore + from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore +except ImportError as e: # pragma: NO COVER + raise ImportError("`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. 
Install the library with the `async_rest` extra using `pip install google-cloud-redis[async_rest]`") from e
+
+from google.auth.aio import credentials as ga_credentials_async  # type: ignore
+
+from google.api_core import exceptions as core_exceptions
 from google.api_core import gapic_v1
+from google.api_core import operations_v1
+from google.cloud.location import locations_pb2 # type: ignore
+from google.api_core import retry_async as retries
+from google.api_core import rest_helpers
+
+
+from google.protobuf import json_format
+
+import json  # type: ignore
+import dataclasses
+from typing import Any, Dict, List, Callable, Tuple, Optional, Sequence, Union
+
+
+from google.cloud.redis_v1.types import cloud_redis
+from google.longrunning import operations_pb2 # type: ignore
 
-from typing import Any, Optional
 from .rest_base import _BaseCloudRedisRestTransport
 from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+import logging
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = logging.getLogger(__name__)
+
+try:
+    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore
+
 DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
     gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
     grpc_version=None,
-    rest_version=None,
+    rest_version=f"google-auth@{google.auth.__version__}",
 )
+
+
+class AsyncCloudRedisRestInterceptor:
+    """Asynchronous Interceptor for CloudRedis.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the AsyncCloudRedisRestTransport.
+
+    .. code-block:: python
+        class MyCustomCloudRedisInterceptor(AsyncCloudRedisRestInterceptor):
+            async def pre_create_instance(self, request, metadata):
+                logging.log(logging.DEBUG, f"Received request: {request}")
+                return request, metadata
+
+            async def post_create_instance(self, response):
+                logging.log(logging.DEBUG, f"Received response: {response}")
+                return response
+
+            async def pre_delete_instance(self, request, metadata):
+                logging.log(logging.DEBUG, f"Received request: {request}")
+                return request, metadata
+
+            async def post_delete_instance(self, response):
+                logging.log(logging.DEBUG, f"Received response: {response}")
+                return response
+
+            async def pre_export_instance(self, request, metadata):
+                logging.log(logging.DEBUG, f"Received request: {request}")
+                return request, metadata
+
+            async def post_export_instance(self, response):
+                logging.log(logging.DEBUG, f"Received response: {response}")
+                return response
+
+            async def pre_failover_instance(self, request, metadata):
+                logging.log(logging.DEBUG, f"Received request: {request}")
+                return request, metadata
+
+            async def post_failover_instance(self, response):
+                logging.log(logging.DEBUG, f"Received response: {response}")
+                return response
+
+            async def pre_get_instance(self, request, metadata):
+                logging.log(logging.DEBUG, f"Received request: {request}")
+                return request, metadata
+
+            async def post_get_instance(self, response):
+                logging.log(logging.DEBUG, f"Received response: {response}")
+                return response
+
+            async def pre_get_instance_auth_string(self, request, metadata):
+                logging.log(logging.DEBUG, f"Received request: {request}")
+                return request, metadata
+
+            async def post_get_instance_auth_string(self, response):
+                logging.log(logging.DEBUG, f"Received response: {response}")
+                return response
+
+            async def pre_import_instance(self, request, metadata):
+                logging.log(logging.DEBUG, f"Received request: {request}")
+                return request, metadata
+
+            async def post_import_instance(self, response):
+                logging.log(logging.DEBUG, f"Received response: {response}")
+                return response
+
+            async def pre_list_instances(self, request, metadata):
+                logging.log(logging.DEBUG, f"Received request: {request}")
+                return request, metadata
+
+            async def post_list_instances(self, response):
+                logging.log(logging.DEBUG, f"Received response: {response}")
+                return response
+
+            async def pre_reschedule_maintenance(self, request, metadata):
+                logging.log(logging.DEBUG, f"Received request: {request}")
+                return request, metadata
+
+            async def post_reschedule_maintenance(self, response):
+                logging.log(logging.DEBUG, f"Received response: {response}")
+                return response
+
+            async def pre_update_instance(self, request, metadata):
+                logging.log(logging.DEBUG, f"Received request: {request}")
+                return request, metadata
+
+            async def post_update_instance(self, response):
+                logging.log(logging.DEBUG, f"Received response: {response}")
+                return response
+
+            async def pre_upgrade_instance(self, request, metadata):
+                logging.log(logging.DEBUG, f"Received request: {request}")
+                return request, metadata
+
+            async def post_upgrade_instance(self, response):
+                logging.log(logging.DEBUG, f"Received response: {response}")
+                return response
+
+        transport = AsyncCloudRedisRestTransport(interceptor=MyCustomCloudRedisInterceptor())
+        client = CloudRedisAsyncClient(transport=transport)
+
+
+    """
+    async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
+        """Pre-rpc interceptor for create_instance
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudRedis server.
+ """ + return request, metadata + + async def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. + """ + return response + + async def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + + async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. + """ + return response + + async def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. 
+ """ + return response, metadata + + async def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for export_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_export_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for export_instance + + DEPRECATED. Please use the `post_export_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_export_instance` interceptor runs + before the `post_export_instance_with_metadata` interceptor. + """ + return response + + async def post_export_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_export_instance_with_metadata` + interceptor in new development instead of the `post_export_instance` interceptor. + When both interceptors are used, this `post_export_instance_with_metadata` interceptor runs after the + `post_export_instance` interceptor. The (possibly modified) response returned by + `post_export_instance` will be passed to + `post_export_instance_with_metadata`. + """ + return response, metadata + + async def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for failover_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_failover_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for failover_instance + + DEPRECATED. Please use the `post_failover_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_failover_instance` interceptor runs + before the `post_failover_instance_with_metadata` interceptor. + """ + return response + + async def post_failover_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for failover_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_failover_instance_with_metadata` + interceptor in new development instead of the `post_failover_instance` interceptor. + When both interceptors are used, this `post_failover_instance_with_metadata` interceptor runs after the + `post_failover_instance` interceptor. 
The (possibly modified) response returned by + `post_failover_instance` will be passed to + `post_failover_instance_with_metadata`. + """ + return response, metadata + + async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: + """Post-rpc interceptor for get_instance + + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. + """ + return response + + async def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + + async def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_instance_auth_string + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString) -> cloud_redis.InstanceAuthString: + """Post-rpc interceptor for get_instance_auth_string + + DEPRECATED. Please use the `post_get_instance_auth_string_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_get_instance_auth_string` interceptor runs + before the `post_get_instance_auth_string_with_metadata` interceptor. + """ + return response + + async def post_get_instance_auth_string_with_metadata(self, response: cloud_redis.InstanceAuthString, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.InstanceAuthString, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance_auth_string + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. 
+ + We recommend only using this `post_get_instance_auth_string_with_metadata` + interceptor in new development instead of the `post_get_instance_auth_string` interceptor. + When both interceptors are used, this `post_get_instance_auth_string_with_metadata` interceptor runs after the + `post_get_instance_auth_string` interceptor. The (possibly modified) response returned by + `post_get_instance_auth_string` will be passed to + `post_get_instance_auth_string_with_metadata`. + """ + return response, metadata + + async def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for import_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_import_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for import_instance + + DEPRECATED. Please use the `post_import_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_import_instance` interceptor runs + before the `post_import_instance_with_metadata` interceptor. + """ + return response + + async def post_import_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_import_instance_with_metadata` + interceptor in new development instead of the `post_import_instance` interceptor. + When both interceptors are used, this `post_import_instance_with_metadata` interceptor runs after the + `post_import_instance` interceptor. The (possibly modified) response returned by + `post_import_instance` will be passed to + `post_import_instance_with_metadata`. + """ + return response, metadata + + async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. 
+ """ + return response + + async def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + + async def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for reschedule_maintenance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for reschedule_maintenance + + DEPRECATED. Please use the `post_reschedule_maintenance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_reschedule_maintenance` interceptor runs + before the `post_reschedule_maintenance_with_metadata` interceptor. + """ + return response + + async def post_reschedule_maintenance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reschedule_maintenance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_reschedule_maintenance_with_metadata` + interceptor in new development instead of the `post_reschedule_maintenance` interceptor. + When both interceptors are used, this `post_reschedule_maintenance_with_metadata` interceptor runs after the + `post_reschedule_maintenance` interceptor. The (possibly modified) response returned by + `post_reschedule_maintenance` will be passed to + `post_reschedule_maintenance_with_metadata`. + """ + return response, metadata + + async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. + """ + return response + + async def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + + async def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for upgrade_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_upgrade_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for upgrade_instance + + DEPRECATED. Please use the `post_upgrade_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_upgrade_instance` interceptor runs + before the `post_upgrade_instance_with_metadata` interceptor. + """ + return response + + async def post_upgrade_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for upgrade_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_upgrade_instance_with_metadata` + interceptor in new development instead of the `post_upgrade_instance` interceptor. + When both interceptors are used, this `post_upgrade_instance_with_metadata` interceptor runs after the + `post_upgrade_instance` interceptor. The (possibly modified) response returned by + `post_upgrade_instance` will be passed to + `post_upgrade_instance_with_metadata`. + """ + return response, metadata + + async def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + async def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + + async def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_wait_operation( + self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for wait_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_wait_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for wait_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AsyncCloudRedisRestStub: + _session: AsyncAuthorizedSession + _host: str + _interceptor: AsyncCloudRedisRestInterceptor + class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): """Asynchronous REST backend transport for CloudRedis. @@ -58,11 +763,13 @@ class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, + def __init__(self, + *, host: str = 'redis.googleapis.com', - credentials: Optional[Any] = None, + credentials: Optional[ga_credentials_async.Credentials] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, url_scheme: str = 'https', + interceptor: Optional[AsyncCloudRedisRestInterceptor] = None, ) -> None: """Instantiate the transport. @@ -73,7 +780,7 @@ def __init__(self, *, Args: host (Optional[str]): The hostname to connect to (default: 'redis.googleapis.com'). - credentials (Optional[Any]): The + credentials (Optional[google.auth.aio.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the @@ -96,7 +803,2453 @@ def __init__(self, *, url_scheme=url_scheme, api_audience=None ) + self._session = AsyncAuthorizedSession(self._credentials) # type: ignore + self._interceptor = interceptor or AsyncCloudRedisRestInterceptor() + self._wrap_with_kind = True + self._prep_wrapped_messages(client_info) + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = None - @property - def kind(self) -> str: - return "rest_asyncio" + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_instances: self._wrap_method( + self.list_instances, + default_timeout=600.0, + client_info=client_info, + ), + self.get_instance: self._wrap_method( + self.get_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.get_instance_auth_string: self._wrap_method( + self.get_instance_auth_string, + default_timeout=600.0, + client_info=client_info, + ), + self.create_instance: self._wrap_method( + self.create_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.update_instance: self._wrap_method( + self.update_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.upgrade_instance: self._wrap_method( + self.upgrade_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.import_instance: self._wrap_method( + self.import_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.export_instance: self._wrap_method( + self.export_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.failover_instance: self._wrap_method( + self.failover_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.delete_instance: self._wrap_method( + self.delete_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.reschedule_maintenance: self._wrap_method( + self.reschedule_maintenance, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + self.wait_operation: self._wrap_method( + self.wait_operation, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + class _CreateInstance(_BaseCloudRedisRestTransport._BaseCreateInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.CreateInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + 
method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.CreateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.cloud_redis.CreateInstanceRequest): + The request object. Request for + [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + + request, metadata = await self._interceptor.pre_create_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CreateInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
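+            # The aiohttp-based session exposes the error body as an awaitable
+            # stream, so it is read and JSON-decoded here before being passed to
+            # the exception formatter.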
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_create_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.create_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CreateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _DeleteInstance(_BaseCloudRedisRestTransport._BaseDeleteInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.DeleteInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: cloud_redis.DeleteInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete instance method over HTTP. + + Args: + request (~.cloud_redis.DeleteInstanceRequest): + The request object. Request for + [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + + request, metadata = await self._interceptor.pre_delete_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.delete_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _ExportInstance(_BaseCloudRedisRestTransport._BaseExportInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ExportInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def 
__call__(self, + request: cloud_redis.ExportInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the export instance method over HTTP. + + Args: + request (~.cloud_redis.ExportInstanceRequest): + The request object. Request for + [Export][google.cloud.redis.v1.CloudRedis.ExportInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseExportInstance._get_http_options() + + request, metadata = await self._interceptor.pre_export_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseExportInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseExportInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ExportInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ExportInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._ExportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
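+            # Unlike the synchronous transport, the body is not available as a
+            # ``content`` attribute here; it must be awaited via ``response.read()``
+            # before it can be decoded and parsed.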
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_export_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_export_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.export_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ExportInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _FailoverInstance(_BaseCloudRedisRestTransport._BaseFailoverInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.FailoverInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.FailoverInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the failover instance method over HTTP. + + Args: + request (~.cloud_redis.FailoverInstanceRequest): + The request object. Request for + [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_http_options() + + request, metadata = await self._interceptor.pre_failover_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.FailoverInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "FailoverInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._FailoverInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_failover_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_failover_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.failover_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "FailoverInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _GetInstance(_BaseCloudRedisRestTransport._BaseGetInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.GetInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + 
headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: cloud_redis.GetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> cloud_redis.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.cloud_redis.GetInstanceRequest): + The request object. Request for + [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.cloud_redis.Instance: + A Memorystore for Redis instance. + """ + + http_options = _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + + request, metadata = await self._interceptor.pre_get_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
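Reviewer note: on the caller's side, the error path below surfaces as a `google.api_core.exceptions.GoogleAPICallError` subclass mapped from the HTTP status. A minimal sketch of catching it; the `transport` argument and the helper name are hypothetical, but `get_instance` is the transport property defined later in this file:

    from google.api_core import exceptions as core_exceptions

    async def get_instance_or_none(transport, request):
        # The transport raises a GoogleAPICallError subclass (NotFound,
        # PermissionDenied, ...) for any response with status >= 400.
        try:
            return await transport.get_instance(request)
        except core_exceptions.NotFound:
            return None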
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = cloud_redis.Instance() + pb_resp = cloud_redis.Instance.pb(resp) + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_get_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.Instance.to_json(resp) + except Exception: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.get_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _GetInstanceAuthString(_BaseCloudRedisRestTransport._BaseGetInstanceAuthString, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.GetInstanceAuthString") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: cloud_redis.GetInstanceAuthStringRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> cloud_redis.InstanceAuthString: + r"""Call the get instance auth string method over HTTP. + + Args: + request (~.cloud_redis.GetInstanceAuthStringRequest): + The request object. Request for + [GetInstanceAuthString][google.cloud.redis.v1.CloudRedis.GetInstanceAuthString]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.cloud_redis.InstanceAuthString: + Instance AUTH string details.
+ """ + + http_options = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_http_options() + + request, metadata = await self._interceptor.pre_get_instance_auth_string(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstanceAuthString", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstanceAuthString", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._GetInstanceAuthString._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = cloud_redis.InstanceAuthString() + pb_resp = cloud_redis.InstanceAuthString.pb(resp) + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_get_instance_auth_string(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_get_instance_auth_string_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.InstanceAuthString.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.get_instance_auth_string", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstanceAuthString", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _ImportInstance(_BaseCloudRedisRestTransport._BaseImportInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ImportInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + 
headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.ImportInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the import instance method over HTTP. + + Args: + request (~.cloud_redis.ImportInstanceRequest): + The request object. Request for + [Import][google.cloud.redis.v1.CloudRedis.ImportInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseImportInstance._get_http_options() + + request, metadata = await self._interceptor.pre_import_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseImportInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseImportInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ImportInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ImportInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._ImportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
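Reviewer note: the `transcoded_request` consumed above is a plain dict derived from the RPC's HTTP rule. An illustrative shape for ImportInstance, assuming its standard `post .../instances/*:import` binding; the project, instance, and GCS values are made up:

    transcoded_request = {
        'method': 'post',
        'uri': '/v1/projects/my-proj/locations/us-central1/instances/my-instance:import',
        'body': '{"inputConfig": {"gcsSource": {"uri": "gs://my-bucket/dump.rdb"}}}',
    }
    # _get_response then dispatches via getattr(session, 'post')(...) to
    # "{host}{uri}".format(host=host, uri=transcoded_request['uri']).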
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_import_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_import_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.import_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ImportInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _ListInstances(_BaseCloudRedisRestTransport._BaseListInstances, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ListInstances") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: cloud_redis.ListInstancesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> cloud_redis.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.cloud_redis.ListInstancesRequest): + The request object. Request for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.cloud_redis.ListInstancesResponse: + Response for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. 
+ + """ + + http_options = _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + + request, metadata = await self._interceptor.pre_list_instances(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListInstances", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = cloud_redis.ListInstancesResponse() + pb_resp = cloud_redis.ListInstancesResponse.pb(resp) + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_list_instances_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.ListInstancesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.list_instances", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListInstances", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _RescheduleMaintenance(_BaseCloudRedisRestTransport._BaseRescheduleMaintenance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.RescheduleMaintenance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.RescheduleMaintenanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the reschedule maintenance method over HTTP. + + Args: + request (~.cloud_redis.RescheduleMaintenanceRequest): + The request object. Request for + [RescheduleMaintenance][google.cloud.redis.v1.CloudRedis.RescheduleMaintenance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_http_options() + + request, metadata = await self._interceptor.pre_reschedule_maintenance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.RescheduleMaintenance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "RescheduleMaintenance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._RescheduleMaintenance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
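Reviewer note: the `rest_helpers.flatten_query_params(..., strict=True)` call used in every `_get_response` helper above comes from `google.api_core` and turns the nested query-param dict into a flat list of key/value pairs suitable for the HTTP session. A rough sketch with invented field values:

    from google.api_core import rest_helpers

    params = rest_helpers.flatten_query_params(
        {"updateMask": "displayName,memorySizeGb", "validateOnly": True},
        strict=True,
    )
    # -> e.g. [('updateMask', 'displayName,memorySizeGb'), ('validateOnly', 'true')]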
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_reschedule_maintenance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_reschedule_maintenance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.reschedule_maintenance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "RescheduleMaintenance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _UpdateInstance(_BaseCloudRedisRestTransport._BaseUpdateInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.UpdateInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.UpdateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.cloud_redis.UpdateInstanceRequest): + The request object. Request for + [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + + request, metadata = await self._interceptor.pre_update_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpdateInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_update_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.update_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpdateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _UpgradeInstance(_BaseCloudRedisRestTransport._BaseUpgradeInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.UpgradeInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.UpgradeInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the upgrade instance method over HTTP. + + Args: + request (~.cloud_redis.UpgradeInstanceRequest): + The request object. Request for + [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_http_options() + + request, metadata = await self._interceptor.pre_upgrade_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpgradeInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpgradeInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._UpgradeInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
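Reviewer note: all of the `CLIENT_LOGGING_SUPPORTED` blocks in this file are no-ops unless DEBUG logging is enabled for the module's logger. One way to surface these records during local debugging; the logger name is assumed from the `google.cloud.redis_v1` strings in the debug messages:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    # Narrow the firehose to this client's records only.
    logging.getLogger("google.cloud.redis_v1").setLevel(logging.DEBUG)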
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_upgrade_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_upgrade_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.upgrade_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpgradeInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + @property + def operations_client(self) -> AsyncOperationsRestClient: + """Create the async client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ], + 'google.longrunning.Operations.WaitOperation': [ + { + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', + 'body': '*', + }, + ], + } + + rest_transport = operations_v1.AsyncOperationsRestTransport( # type: ignore + host=self._host, + # use the credentials which are saved + credentials=self._credentials, # type: ignore + http_options=http_options, + path_prefix="v1" + ) + + self._operations_client = AsyncOperationsRestClient(transport=rest_transport) + + # Return the client from cache. 
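Reviewer note: because these transport methods return raw `operations_pb2.Operation` messages rather than `google.api_core` LRO futures, the cached operations client below is what a caller would poll. A hypothetical loop, assuming `transport` is an instance of this class:

    import asyncio

    async def wait_for_export(transport, request):
        # export_instance resolves to a raw Operation, not an LRO future.
        op = await transport.export_instance(request)
        while not op.done:
            await asyncio.sleep(1)
            op = await transport.operations_client.get_operation(name=op.name)
        return op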
+ return self._operations_client + + @property + def create_instance(self) -> Callable[ + [cloud_redis.CreateInstanceRequest], + operations_pb2.Operation]: + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance(self) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + operations_pb2.Operation]: + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_instance(self) -> Callable[ + [cloud_redis.ExportInstanceRequest], + operations_pb2.Operation]: + return self._ExportInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def failover_instance(self) -> Callable[ + [cloud_redis.FailoverInstanceRequest], + operations_pb2.Operation]: + return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance(self) -> Callable[ + [cloud_redis.GetInstanceRequest], + cloud_redis.Instance]: + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance_auth_string(self) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], + cloud_redis.InstanceAuthString]: + return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_instance(self) -> Callable[ + [cloud_redis.ImportInstanceRequest], + operations_pb2.Operation]: + return self._ImportInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances(self) -> Callable[ + [cloud_redis.ListInstancesRequest], + cloud_redis.ListInstancesResponse]: + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def reschedule_maintenance(self) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], + operations_pb2.Operation]: + return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance(self) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + operations_pb2.Operation]: + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def upgrade_instance(self) -> Callable[ + [cloud_redis.UpgradeInstanceRequest], + operations_pb2.Operation]: + return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.GetLocation") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. 
+ + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + + request, metadata = await self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
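Reviewer note: every method in this class brackets the request with `pre_*`/`post_*` interceptor hooks, so behavior can be customized without subclassing the transport. A minimal custom interceptor, assuming the `AsyncCloudRedisRestInterceptor` base class defined earlier in this module:

    class TimingInterceptor(AsyncCloudRedisRestInterceptor):
        async def pre_get_location(self, request, metadata):
            # Runs before transcoding; may rewrite the request or metadata.
            return request, metadata

        async def post_get_location(self, response):
            # Runs after the response proto has been parsed.
            return response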
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ListLocations") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options = _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + + request, metadata = await self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.CancelOperation") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = await self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + return await self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.DeleteOperation") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = await self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + return await self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.GetOperation") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + + request, metadata = await self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ListOperations") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + 
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + + request, metadata = await self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
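On the caller's side, the status check that follows this comment is what turns a >= 400 response body into a typed exception. A hedged usage sketch, assuming an api-core version that provides core_exceptions.format_http_response_error and an operation name that does not exist:

    from google.api_core import exceptions as core_exceptions
    from google.longrunning import operations_pb2

    async def probe(transport):
        try:
            await transport.get_operation(
                operations_pb2.GetOperationRequest(
                    name="projects/p/locations/l/operations/missing"))
        except core_exceptions.NotFound as exc:
            # HTTP 404 is mapped to NotFound, a GoogleAPICallError
            # subclass carrying the parsed error payload.
            print(exc.message)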
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def wait_operation(self): + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + + class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.WaitOperation") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: operations_pb2.WaitOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the wait operation method over HTTP. + + Args: + request (operations_pb2.WaitOperationRequest): + The request object for WaitOperation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from WaitOperation method. 
+ """ + + http_options = _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + + request, metadata = await self._interceptor.pre_wait_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "WaitOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_wait_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "WaitOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest_asyncio" + + async def close(self): + await self._session.close() diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py index 2ed2456c7b..e63a3d8503 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py @@ -20,7 +20,6 @@ from google.protobuf import json_format from google.cloud.location import locations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO -from google.auth import credentials as ga_credentials # type: ignore import re from typing import Any, Callable, Dict, 
List, Optional, Sequence, Tuple, Union @@ -45,7 +44,7 @@ class _BaseCloudRedisRestTransport(CloudRedisTransport): def __init__(self, *, host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, + credentials: Optional[Any] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, url_scheme: str = 'https', @@ -55,7 +54,7 @@ def __init__(self, *, Args: host (Optional[str]): The hostname to connect to (default: 'redis.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The + credentials (Optional[Any]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the @@ -555,6 +554,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -578,6 +579,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -601,6 +604,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -624,6 +629,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -647,6 +654,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -670,6 +679,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -692,6 +703,36 @@ def _get_query_params_json(transcoded_request): query_params = json.loads(json.dumps(transcoded_request['query_params'])) return query_params + class _BaseWaitOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + __all__=( '_BaseCloudRedisRestTransport', diff --git a/tests/integration/goldens/redis/noxfile.py b/tests/integration/goldens/redis/noxfile.py index 
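All of the _get_transcoded_request helpers above funnel through google.api_core.path_template.transcode, which matches a request against the declared http_options and splits it into method, URI, body, and query params. A small sketch using the _BaseWaitOperation rule shown above (the resource name is made up):

    from google.api_core import path_template
    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    http_options = [{
        'method': 'post',
        'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait',
        'body': '*',
    }]
    request = operations_pb2.WaitOperationRequest(
        name='projects/p/locations/us-central1/operations/op-123')
    transcoded = path_template.transcode(
        http_options, **json_format.MessageToDict(request))
    # transcoded['method'] -> 'post'
    # transcoded['uri']    -> '/v2/projects/p/locations/us-central1/operations/op-123:wait'
    # transcoded['body']   -> the remaining fields ('body': '*' claims everything
    #                         not already consumed by the URI template)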
513250d8fb..755a3329f3 100755 --- a/tests/integration/goldens/redis/noxfile.py +++ b/tests/integration/goldens/redis/noxfile.py @@ -29,7 +29,8 @@ "3.9", "3.10", "3.11", - "3.12" + "3.12", + "3.13", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -39,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" +DEFAULT_PYTHON_VERSION = "3.13" nox.sessions = [ "unit", @@ -61,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -94,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -129,7 +130,8 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - "grpcio", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", "grpcio-status", "protobuf", "proto-plus", diff --git a/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json b/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json index 174d9dff49..78f872bc4a 100755 --- a/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json +++ b/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json @@ -55,7 +55,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -143,7 +143,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -224,7 +224,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -304,7 +304,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -389,7 +389,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -473,7 +473,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -558,7 +558,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -642,7 +642,7 @@ }, { "name": 
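The snippet-metadata rewrite running through this file ("Sequence[Tuple[str, str]" becoming "Sequence[Tuple[str, Union[str, bytes]]]") tracks a real signature change, spelled out in the transport docstrings earlier in this diff: metadata keys ending in "-bin" are gRPC binary headers and must carry bytes values. An illustrative call (the custom key and payload are made up):

    metadata = [
        ("x-goog-request-params", "name=projects/p/locations/l/instances/i"),  # str value
        ("x-debug-trace-bin", b"\x00\x01\x02"),  # '-bin' suffix, so the value is bytes
    ]
    # 'client' is any CloudRedisClient; 'request' is a GetInstanceRequest.
    client.get_instance(request=request, metadata=metadata)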
"metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -723,7 +723,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.redis_v1.types.InstanceAuthString", @@ -803,7 +803,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.redis_v1.types.InstanceAuthString", @@ -884,7 +884,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.redis_v1.types.Instance", @@ -964,7 +964,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.redis_v1.types.Instance", @@ -1049,7 +1049,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -1133,7 +1133,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -1214,7 +1214,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesAsyncPager", @@ -1294,7 +1294,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesPager", @@ -1383,7 +1383,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -1471,7 +1471,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -1556,7 +1556,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -1640,7 +1640,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -1725,7 +1725,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -1809,7 +1809,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", diff --git a/tests/integration/goldens/redis/setup.py b/tests/integration/goldens/redis/setup.py index 5e8332354d..220725b65c 100755 --- a/tests/integration/goldens/redis/setup.py +++ b/tests/integration/goldens/redis/setup.py @@ -44,8 +44,15 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", 
"protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] +extras = { + "async_rest": [ + "google-api-core[grpc] >= 2.21.0, < 3.0.0dev", + "google-auth[aiohttp] >= 2.35.0, <3.0.0dev" + ], +} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -81,6 +88,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -88,6 +96,7 @@ packages=packages, python_requires=">=3.7", install_requires=dependencies, + extras_require=extras, include_package_data=True, zip_safe=False, ) diff --git a/tests/integration/goldens/redis/testing/constraints-3.13.txt b/tests/integration/goldens/redis/testing/constraints-3.13.txt new file mode 100755 index 0000000000..ed7f9aed25 --- /dev/null +++ b/tests/integration/goldens/redis/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 460dd43427..ab152fe6d1 100755 --- a/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -23,7 +23,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -31,6 +31,13 @@ from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +try: + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession + from google.api_core.operations_v1 import AsyncOperationsRestClient + HAS_ASYNC_REST_EXTRA = True +except ImportError: # pragma: NO COVER + HAS_ASYNC_REST_EXTRA = False from requests import Response from requests import Request, PreparedRequest from requests.sessions import Session @@ -79,6 +86,13 @@ } CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -195,6 +209,7 @@ def test__get_universe_domain(): CloudRedisClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." + @pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ (401, CRED_INFO_JSON, True), (403, CRED_INFO_JSON, True), @@ -866,25 +881,6 @@ def test_list_instances(request_type, transport: str = 'grpc'): assert response.unreachable == ['unreachable_value'] -def test_list_instances_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_instances() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ListInstancesRequest() - - def test_list_instances_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -946,30 +942,6 @@ def test_list_instances_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_instances_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_instances() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ListInstancesRequest() - - @pytest.mark.asyncio async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1042,7 +1014,6 @@ async def test_list_instances_async(transport: str = 'grpc_asyncio', request_typ async def test_list_instances_async_from_dict(): await test_list_instances_async(request_type=dict) - def test_list_instances_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1471,25 +1442,6 @@ def test_get_instance(request_type, transport: str = 'grpc'): assert response.available_maintenance_versions == ['available_maintenance_versions_value'] -def test_get_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceRequest() - - def test_get_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1549,55 +1501,6 @@ def test_get_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], - )) - await client.get_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceRequest() - - @pytest.mark.asyncio async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1720,7 +1623,6 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= async def test_get_instance_async_from_dict(): await test_get_instance_async(request_type=dict) - def test_get_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1901,25 +1803,6 @@ def test_get_instance_auth_string(request_type, transport: str = 'grpc'): assert response.auth_string == 'auth_string_value' -def test_get_instance_auth_string_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_instance_auth_string() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceAuthStringRequest() - - def test_get_instance_auth_string_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
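The async empty-call tests being deleted here all leaned on one stubbing trick that remains useful when faking grpc_asyncio transports: wrapping the canned response in FakeUnaryUnaryCall so the mocked stub is awaitable. A condensed sketch, using the imports this test module already has (the helper name and resource path are made up):

    from unittest import mock
    from google.api_core import grpc_helpers_async

    async def stub_get_instance(client):
        with mock.patch.object(
                type(client.transport.get_instance), '__call__') as call:
            call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
                cloud_redis.Instance(name='name_value'))
            response = await client.get_instance(cloud_redis.GetInstanceRequest(
                name='projects/p/locations/l/instances/i'))
            call.assert_called()
            return response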
@@ -1979,29 +1862,6 @@ def test_get_instance_auth_string_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_instance_auth_string_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString( - auth_string='auth_string_value', - )) - await client.get_instance_auth_string() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceAuthStringRequest() - - @pytest.mark.asyncio async def test_get_instance_auth_string_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2072,7 +1932,6 @@ async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', r async def test_get_instance_auth_string_async_from_dict(): await test_get_instance_auth_string_async(request_type=dict) - def test_get_instance_auth_string_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2250,25 +2109,6 @@ def test_create_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_create_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.CreateInstanceRequest() - - def test_create_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2335,29 +2175,6 @@ def test_create_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.CreateInstanceRequest() - - @pytest.mark.asyncio async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2432,7 +2249,6 @@ async def test_create_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_create_instance_async_from_dict(): await test_create_instance_async(request_type=dict) - def test_create_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2632,25 +2448,6 @@ def test_update_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_update_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpdateInstanceRequest() - - def test_update_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2713,29 +2510,6 @@ def test_update_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpdateInstanceRequest() - - @pytest.mark.asyncio async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2810,7 +2584,6 @@ async def test_update_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_update_instance_async_from_dict(): await test_update_instance_async(request_type=dict) - def test_update_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3000,25 +2773,6 @@ def test_upgrade_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_upgrade_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.upgrade_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpgradeInstanceRequest() - - def test_upgrade_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3085,29 +2839,6 @@ def test_upgrade_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_upgrade_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.upgrade_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpgradeInstanceRequest() - - @pytest.mark.asyncio async def test_upgrade_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3182,7 +2913,6 @@ async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_t async def test_upgrade_instance_async_from_dict(): await test_upgrade_instance_async(request_type=dict) - def test_upgrade_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3372,25 +3102,6 @@ def test_import_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_import_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.import_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ImportInstanceRequest() - - def test_import_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -3455,29 +3166,6 @@ def test_import_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_import_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.import_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ImportInstanceRequest() - - @pytest.mark.asyncio async def test_import_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3552,7 +3240,6 @@ async def test_import_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_import_instance_async_from_dict(): await test_import_instance_async(request_type=dict) - def test_import_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3742,25 +3429,6 @@ def test_export_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_export_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.export_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ExportInstanceRequest() - - def test_export_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3825,29 +3493,6 @@ def test_export_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_export_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.export_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ExportInstanceRequest() - - @pytest.mark.asyncio async def test_export_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3922,7 +3567,6 @@ async def test_export_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_export_instance_async_from_dict(): await test_export_instance_async(request_type=dict) - def test_export_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4112,25 +3756,6 @@ def test_failover_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_failover_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.failover_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.FailoverInstanceRequest() - - def test_failover_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4195,29 +3820,6 @@ def test_failover_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_failover_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.failover_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.FailoverInstanceRequest() - - @pytest.mark.asyncio async def test_failover_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4292,7 +3894,6 @@ async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_ async def test_failover_instance_async_from_dict(): await test_failover_instance_async(request_type=dict) - def test_failover_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4482,25 +4083,6 @@ def test_delete_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_delete_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.DeleteInstanceRequest() - - def test_delete_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4565,29 +4147,6 @@ def test_delete_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.DeleteInstanceRequest() - - @pytest.mark.asyncio async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4662,7 +4221,6 @@ async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_delete_instance_async_from_dict(): await test_delete_instance_async(request_type=dict) - def test_delete_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4842,25 +4400,6 @@ def test_reschedule_maintenance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_reschedule_maintenance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.reschedule_maintenance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.RescheduleMaintenanceRequest() - - def test_reschedule_maintenance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4925,29 +4464,6 @@ def test_reschedule_maintenance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_reschedule_maintenance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.reschedule_maintenance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.RescheduleMaintenanceRequest() - - @pytest.mark.asyncio async def test_reschedule_maintenance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5022,7 +4538,6 @@ async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', req async def test_reschedule_maintenance_async_from_dict(): await test_reschedule_maintenance_async(request_type=dict) - def test_reschedule_maintenance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5186,44 +4701,6 @@ async def test_reschedule_maintenance_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.ListInstancesRequest, - dict, -]) -def test_list_instances_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_redis.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_instances(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - def test_list_instances_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5321,6 +4798,7 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_instances(request) @@ -5337,66 +4815,6 @@ def test_list_instances_rest_unset_required_fields(): assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instances_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_list_instances") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) - - request = cloud_redis.ListInstancesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloud_redis.ListInstancesResponse() - - client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_instances_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ListInstancesRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
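The req.return_value.headers = {...} lines being added across these tests are load-bearing: the transports' new DEBUG logging path reads dict(response.headers) off every response, so the mocked responses now carry realistic headers. In miniature (values are placeholders; 'req' is the patched session request handle, as in the surrounding tests):

    from requests import Response

    response_value = Response()
    response_value.status_code = 200
    response_value._content = b'{}'
    response_value.headers = {"header-1": "value-1", "header-2": "value-2"}
    req.return_value = response_value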
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_instances(request) - - def test_list_instances_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5425,6 +4843,7 @@ def test_list_instances_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_instances(**mock_args) @@ -5512,106 +4931,18 @@ def test_list_instances_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceRequest, - dict, -]) -def test_get_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_get_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_redis.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_instance(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' - assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' - assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' - assert response.tier == cloud_redis.Instance.Tier.BASIC - assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' - assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING - assert response.auth_enabled is True - assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION - assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' - assert response.read_endpoint_port == 1920 - assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' - assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] - 
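The one recurring addition in this file is the line req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} after every surviving mocked response: the regenerated transport apparently inspects response headers, so the stub must carry a concrete mapping rather than an implicit Mock attribute. The stub shape, captured as a helper (the name and factoring are illustrative):

from requests import Response

def make_stub_response(json_payload: str, status_code: int = 200) -> Response:
    # Status, raw JSON body, and a real headers mapping: the three pieces
    # every mocked call in this file now sets on the fake Response.
    response = Response()
    response.status_code = status_code
    response._content = json_payload.encode('UTF-8')
    response.headers = {"header-1": "value-1", "header-2": "value-2"}
    return response
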
-def test_get_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.get_instance in client._transport._wrapped_methods @@ -5695,6 +5026,7 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_instance(request) @@ -5711,66 +5043,6 @@ def test_get_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloud_redis.Instance.to_json(cloud_redis.Instance()) - - request = cloud_redis.GetInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloud_redis.Instance() - - client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
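The *_use_cached_wrapped_rpc tests (re-indented above, unchanged in substance) pin down a caching contract: gapic_v1.method.wrap_method runs once per RPC at client construction, and every later call goes through client._transport._wrapped_methods. Condensed to its core, following the replace-with-mock step the surviving test performs next:

from unittest import mock

# Swap the cached wrapper for a mock, then call twice: both calls must hit
# the same cached entry, i.e. no re-wrapping happens per invocation.
mock_rpc = mock.Mock()
client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc

client.get_instance(request)
client.get_instance(request)
assert mock_rpc.call_count == 2
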
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_instance(request) - - def test_get_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5799,6 +5071,7 @@ def test_get_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_instance(**mock_args) @@ -5824,49 +5097,6 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_get_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceAuthStringRequest, - dict, -]) -def test_get_instance_auth_string_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloud_redis.InstanceAuthString( - auth_string='auth_string_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_redis.InstanceAuthString.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_instance_auth_string(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloud_redis.InstanceAuthString) - assert response.auth_string == 'auth_string_value' - def test_get_instance_auth_string_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5962,6 +5192,7 @@ def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis. 
response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_instance_auth_string(request) @@ -5978,66 +5209,6 @@ def test_get_instance_auth_string_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_auth_string_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_auth_string") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance_auth_string") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.GetInstanceAuthStringRequest.pb(cloud_redis.GetInstanceAuthStringRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloud_redis.InstanceAuthString.to_json(cloud_redis.InstanceAuthString()) - - request = cloud_redis.GetInstanceAuthStringRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloud_redis.InstanceAuthString() - - client.get_instance_auth_string(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_instance_auth_string_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceAuthStringRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
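The deleted *_rest_bad_request tests all shared one shape: stub requests.Session.request to return a 400 and assert that the REST transport translates it into core_exceptions.BadRequest. A reusable condensation of that pattern (the helper and its arguments are illustrative, not part of the generated file):

from unittest import mock

import pytest
from google.api_core import exceptions as core_exceptions
from requests import Request, Response
from requests.sessions import Session

def assert_http_400_maps_to_bad_request(call, request):
    # `call` is any bound RPC method, e.g. client.get_instance_auth_string.
    with mock.patch.object(Session, 'request') as req, \
            pytest.raises(core_exceptions.BadRequest):
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        call(request)
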
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_instance_auth_string(request) - - def test_get_instance_auth_string_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6066,6 +5237,7 @@ def test_get_instance_auth_string_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_instance_auth_string(**mock_args) @@ -6091,123 +5263,21 @@ def test_get_instance_auth_string_rest_flattened_error(transport: str = 'rest'): ) -def test_get_instance_auth_string_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - +def test_create_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.CreateInstanceRequest, - dict, -]) -def test_create_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 
'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_redis.CreateInstanceRequest.meta.fields["instance"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["instance"][field])): - del request_init["instance"][field][i][subfield] - else: - del request_init["instance"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
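The block deleted above carried the version-skew guard from issue 1748: the sample request dicts are written against the generator's proto definitions, so any nested field that the proto library installed at test time does not define must be stripped before the request is constructed. Its core loop, condensed (runtime_nested_fields as computed by the deleted get_message_fields helper):

def prune_unknown_subfields(request_init, runtime_nested_fields):
    # Drop (field, subfield) pairs the installed runtime message lacks;
    # handles singular message fields and repeated ones alike.
    for field, value in request_init.items():
        entries = value if isinstance(value, list) else [value]
        for entry in entries:
            if not isinstance(entry, dict):
                continue
            for subfield in list(entry.keys()):
                if (field, subfield) not in runtime_nested_fields:
                    del entry[subfield]
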
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_instance(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - -def test_create_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_instance in client._transport._wrapped_methods + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -6299,6 +5369,7 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_instance(request) @@ -6319,67 +5390,6 @@ def test_create_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(("instanceId", )) & set(("parent", "instanceId", "instance", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_create_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.CreateInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.CreateInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - 
transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_instance(request) - - def test_create_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6408,6 +5418,7 @@ def test_create_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_instance(**mock_args) @@ -6435,108 +5446,6 @@ def test_create_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_create_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpdateInstanceRequest, - dict, -]) -def test_update_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 
'available_maintenance_versions_value2']} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_redis.UpdateInstanceRequest.meta.fields["instance"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["instance"][field])): - del request_init["instance"][field][i][subfield] - else: - del request_init["instance"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_update_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6631,6 +5540,7 @@ def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateIns response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_instance(request) @@ -6647,69 +5557,8 @@ def test_update_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(("updateMask", )) & set(("updateMask", "instance", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_update_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.UpdateInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.UpdateInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
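Every LRO smoke test deleted in this file (create_instance through reschedule_maintenance) stubbed the identical payload: an operations_pb2.Operation named 'operations/spam', serialized with json_format.MessageToJson and checked via response.operation.name. The serialization round-trip those stubs depend on, in isolation:

from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation(name='operations/spam')
payload = json_format.MessageToJson(op)

# The REST transport parses the JSON body back into an Operation, which the
# client wraps in a future; only the underlying name was ever asserted.
roundtrip = json_format.Parse(payload, operations_pb2.Operation())
assert roundtrip.name == 'operations/spam'
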
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_instance(request) - - -def test_update_instance_rest_flattened(): - client = CloudRedisClient( +def test_update_instance_rest_flattened(): + client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6735,6 +5584,7 @@ def test_update_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_instance(**mock_args) @@ -6761,44 +5611,6 @@ def test_update_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_update_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpgradeInstanceRequest, - dict, -]) -def test_upgrade_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.upgrade_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_upgrade_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6900,6 +5712,7 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.upgrade_instance(request) @@ -6916,67 +5729,6 @@ def test_upgrade_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "redisVersion", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_upgrade_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_upgrade_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_upgrade_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.UpgradeInstanceRequest.pb(cloud_redis.UpgradeInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.UpgradeInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.upgrade_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_upgrade_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.UpgradeInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.upgrade_instance(request) - - def test_upgrade_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7004,6 +5756,7 @@ def test_upgrade_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.upgrade_instance(**mock_args) @@ -7030,44 +5783,6 @@ def test_upgrade_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_upgrade_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.ImportInstanceRequest, - dict, -]) -def test_import_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.import_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_import_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7165,6 +5880,7 @@ def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportIns response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.import_instance(request) @@ -7181,67 +5897,6 @@ def test_import_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "inputConfig", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_import_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_import_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.ImportInstanceRequest.pb(cloud_redis.ImportInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.ImportInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.import_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_import_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ImportInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.import_instance(request) - - def test_import_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7269,6 +5924,7 @@ def test_import_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.import_instance(**mock_args) @@ -7295,44 +5951,6 @@ def test_import_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_import_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.ExportInstanceRequest, - dict, -]) -def test_export_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.export_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_export_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7430,6 +6048,7 @@ def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportIns response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_instance(request) @@ -7446,69 +6065,8 @@ def test_export_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "outputConfig", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_export_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_export_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.ExportInstanceRequest.pb(cloud_redis.ExportInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.ExportInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.export_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_export_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ExportInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.export_instance(request) - - -def test_export_instance_rest_flattened(): - client = CloudRedisClient( +def test_export_instance_rest_flattened(): + client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7534,6 +6092,7 @@ def test_export_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.export_instance(**mock_args) @@ -7560,44 +6119,6 @@ def test_export_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_export_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.FailoverInstanceRequest, - dict, -]) -def test_failover_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.failover_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_failover_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7695,6 +6216,7 @@ def test_failover_instance_rest_required_fields(request_type=cloud_redis.Failove response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.failover_instance(request) @@ -7711,67 +6233,6 @@ def test_failover_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_failover_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_failover_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_failover_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.FailoverInstanceRequest.pb(cloud_redis.FailoverInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.FailoverInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.failover_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_failover_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.FailoverInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.failover_instance(request) - - def test_failover_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7799,6 +6260,7 @@ def test_failover_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.failover_instance(**mock_args) @@ -7825,44 +6287,6 @@ def test_failover_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_failover_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.DeleteInstanceRequest, - dict, -]) -def test_delete_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_delete_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7959,6 +6383,7 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_instance(request) @@ -7975,67 +6400,6 @@ def test_delete_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_delete_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.DeleteInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.DeleteInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_instance(request) - - def test_delete_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8062,6 +6426,7 @@ def test_delete_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_instance(**mock_args) @@ -8087,44 +6452,6 @@ def test_delete_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_delete_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.RescheduleMaintenanceRequest, - dict, -]) -def test_reschedule_maintenance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.reschedule_maintenance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_reschedule_maintenance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8222,6 +6549,7 @@ def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.Re response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.reschedule_maintenance(request) @@ -8238,67 +6566,6 @@ def test_reschedule_maintenance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "rescheduleType", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reschedule_maintenance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_reschedule_maintenance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_reschedule_maintenance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.RescheduleMaintenanceRequest.pb(cloud_redis.RescheduleMaintenanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.RescheduleMaintenanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.reschedule_maintenance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_reschedule_maintenance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.RescheduleMaintenanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.reschedule_maintenance(request) - - def test_reschedule_maintenance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8327,6 +6594,7 @@ def test_reschedule_maintenance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.reschedule_maintenance(**mock_args) @@ -8354,13 +6622,6 @@ def test_reschedule_maintenance_rest_flattened_error(transport: str = 'rest'): ) -def test_reschedule_maintenance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CloudRedisGrpcTransport( @@ -8455,931 +6716,5252 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" -def test_transport_kind_grpc_asyncio(): - transport = CloudRedisAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() +def test_initialize_client_w_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" ) - assert transport.kind == "grpc_asyncio" + assert client is not None -def test_transport_kind_rest(): - transport = CloudRedisClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instances_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - assert transport.kind == "rest" + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value = cloud_redis.ListInstancesResponse() + client.list_instances(request=None) -def test_transport_kind_rest_asyncio(): - transport = CloudRedisAsyncClient.get_transport_class("rest_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "rest_asyncio" + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ListInstancesRequest() + assert args[0] == request_msg -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - assert isinstance( - client.transport, - transports.CloudRedisGrpcTransport, + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = cloud_redis.Instance() + client.get_instance(request=None) + + # Establish that the underlying stub method was called. 
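+    # mock_calls[0] unpacks to (name, args, kwargs); args[0] is the request
+    # message the client synthesized when it was handed request=None.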
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_auth_string_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) -def test_cloud_redis_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudRedisTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + call.return_value = cloud_redis.InstanceAuthString() + client.get_instance_auth_string(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceAuthStringRequest() -def test_cloud_redis_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.CloudRedisTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + assert args[0] == request_msg - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_instances', - 'get_instance', - 'get_instance_auth_string', - 'create_instance', - 'update_instance', - 'upgrade_instance', - 'import_instance', - 'export_instance', - 'failover_instance', - 'delete_instance', - 'reschedule_maintenance', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - with pytest.raises(NotImplementedError): - transport.close() + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance(request=None) - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client + # Establish that the underlying stub method was called. 
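+    # create_instance is a long-running operation, so the stub is faked with
+    # an operations_pb2.Operation rather than a direct response message.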
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.CreateInstanceRequest() - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + assert args[0] == request_msg -def test_cloud_redis_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudRedisTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance(request=None) -def test_cloud_redis_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudRedisTransport() - adc.assert_called_once() + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpdateInstanceRequest() + assert args[0] == request_msg -def test_cloud_redis_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudRedisClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_upgrade_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudRedisGrpcTransport, - transports.CloudRedisGrpcAsyncIOTransport, - ], -) -def test_cloud_redis_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.upgrade_instance(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpgradeInstanceRequest() -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudRedisGrpcTransport, - transports.CloudRedisGrpcAsyncIOTransport, - transports.CloudRedisRestTransport, - ], -) -def test_cloud_redis_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + assert args[0] == request_msg -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudRedisGrpcTransport, grpc_helpers), - (transports.CloudRedisGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_import_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - create_channel.assert_called_with( - "redis.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="redis.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.import_instance(request=None) + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ImportInstanceRequest() -@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) -def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() + assert args[0] == request_msg - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) -def test_cloud_redis_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.CloudRedisRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.export_instance(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ExportInstanceRequest() -def test_cloud_redis_rest_lro_client(): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_failover_instance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="grpc", ) - transport = client.transport - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) + # Mock the actual call, and fake the request. 
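+    # Patching type(...).__call__ swaps out the underlying gRPC stub method
+    # itself, so the call never touches a real channel.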
+ with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.failover_instance(request=None) - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.FailoverInstanceRequest() + assert args[0] == request_msg -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_cloud_redis_host_no_port(transport_name): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'redis.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://redis.googleapis.com' + transport="grpc", ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_cloud_redis_host_with_port(transport_name): + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.DeleteInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
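+# The same failsafe pattern generalizes to any unary RPC on the transport.
+# A minimal sketch, with hypothetical names used purely for illustration:
+#
+#     with mock.patch.object(type(client.transport.some_rpc), '__call__') as call:
+#         call.return_value = SomeResponse()
+#         client.some_rpc(request=None)
+#         call.assert_called()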
+def test_reschedule_maintenance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'redis.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://redis.googleapis.com:8000' + transport="grpc", ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_cloud_redis_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = CloudRedisClient( - credentials=creds1, - transport=transport_name, - ) - client2 = CloudRedisClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_instances._session - session2 = client2.transport.list_instances._session - assert session1 != session2 - session1 = client1.transport.get_instance._session - session2 = client2.transport.get_instance._session - assert session1 != session2 - session1 = client1.transport.get_instance_auth_string._session - session2 = client2.transport.get_instance_auth_string._session - assert session1 != session2 - session1 = client1.transport.create_instance._session - session2 = client2.transport.create_instance._session - assert session1 != session2 - session1 = client1.transport.update_instance._session - session2 = client2.transport.update_instance._session - assert session1 != session2 - session1 = client1.transport.upgrade_instance._session - session2 = client2.transport.upgrade_instance._session - assert session1 != session2 - session1 = client1.transport.import_instance._session - session2 = client2.transport.import_instance._session - assert session1 != session2 - session1 = client1.transport.export_instance._session - session2 = client2.transport.export_instance._session - assert session1 != session2 - session1 = client1.transport.failover_instance._session - session2 = client2.transport.failover_instance._session - assert session1 != session2 - session1 = client1.transport.delete_instance._session - session2 = client2.transport.delete_instance._session - assert session1 != session2 - session1 = client1.transport.reschedule_maintenance._session - session2 = client2.transport.reschedule_maintenance._session - assert session1 != session2 -def test_cloud_redis_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.reschedule_maintenance(request=None) - # Check that channel is used if provided. - transport = transports.CloudRedisGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.RescheduleMaintenanceRequest() + assert args[0] == request_msg -def test_cloud_redis_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - # Check that channel is used if provided. 
- transport = transports.CloudRedisGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, +def test_transport_kind_grpc_asyncio(): + transport = CloudRedisAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - + assert transport.kind == "grpc_asyncio" -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) -def test_cloud_redis_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel +def test_initialize_client_w_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instances_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) -def test_cloud_redis_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. 
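+        # The async client awaits the stub, so the mock must return an
+        # awaitable; FakeUnaryUnaryCall wraps the response message accordingly.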
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_instances(request=None) - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ListInstancesRequest() - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel + assert args[0] == request_msg -def test_cloud_redis_grpc_lro_client(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - transport = client.transport - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + )) + await client.get_instance(request=None) - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceRequest() + assert args[0] == request_msg -def test_cloud_redis_grpc_lro_async_client(): - client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_auth_string_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + )) + await client.get_instance_auth_string(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceAuthStringRequest() -def test_instance_path(): - project = "squid" - location = "clam" - instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) - actual = CloudRedisClient.instance_path(project, location, instance) - assert expected == actual + assert args[0] == request_msg -def test_parse_instance_path(): - expected = { - "project": "octopus", - "location": "oyster", - "instance": "nudibranch", - } - path = CloudRedisClient.instance_path(**expected) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) - # Check that the path construction is reversible. - actual = CloudRedisClient.parse_instance_path(path) - assert expected == actual + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_instance(request=None) -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = CloudRedisClient.common_billing_account_path(billing_account) - assert expected == actual + # Establish that the underlying stub method was called. 
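+    # Awaiting the RPC resolves the FakeUnaryUnaryCall; the recorded stub
+    # invocation can then be inspected exactly as in the sync tests.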
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.CreateInstanceRequest() + assert args[0] == request_msg -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = CloudRedisClient.common_billing_account_path(**expected) - # Check that the path construction is reversible. - actual = CloudRedisClient.parse_common_billing_account_path(path) - assert expected == actual +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = CloudRedisClient.common_folder_path(folder) - assert expected == actual + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_instance(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpdateInstanceRequest() -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = CloudRedisClient.common_folder_path(**expected) + assert args[0] == request_msg - # Check that the path construction is reversible. - actual = CloudRedisClient.parse_common_folder_path(path) - assert expected == actual -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = CloudRedisClient.common_organization_path(organization) - assert expected == actual +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_upgrade_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.upgrade_instance(request=None) -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = CloudRedisClient.common_organization_path(**expected) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpgradeInstanceRequest() - # Check that the path construction is reversible. - actual = CloudRedisClient.parse_common_organization_path(path) - assert expected == actual + assert args[0] == request_msg -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = CloudRedisClient.common_project_path(project) - assert expected == actual +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_import_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = CloudRedisClient.common_project_path(**expected) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.import_instance(request=None) - # Check that the path construction is reversible. - actual = CloudRedisClient.parse_common_project_path(path) - assert expected == actual + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ImportInstanceRequest() -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = CloudRedisClient.common_location_path(project, location) - assert expected == actual + assert args[0] == request_msg -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = CloudRedisClient.common_location_path(**expected) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_export_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) - # Check that the path construction is reversible. - actual = CloudRedisClient.parse_common_location_path(path) - assert expected == actual + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.export_instance(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ExportInstanceRequest() -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() + assert args[0] == request_msg - with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_failover_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') ) - prep.assert_called_once_with(client_info) + await client.failover_instance(request=None) - with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: - transport_class = CloudRedisClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.FailoverInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') ) - prep.assert_called_once_with(client_info) + await client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.DeleteInstanceRequest() + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_reschedule_maintenance_empty_call_grpc_asyncio(): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.reschedule_maintenance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.RescheduleMaintenanceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = CloudRedisClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" -def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest): +def test_list_instances_rest_bad_request(request_type=cloud_redis.ListInstancesRequest): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest" ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_instances(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.ListInstancesRequest, + dict, +]) +def test_list_instances_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) + req.return_value.content = return_value + + request = cloud_redis.ListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.ListInstancesResponse() + post_with_metadata.return_value = cloud_redis.ListInstancesResponse(), metadata + + client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_instance_rest_bad_request(request_type=cloud_redis.GetInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = Request() + response_value.request = mock.Mock() req.return_value = response_value - client.get_location(request) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_instance(request) + @pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, + cloud_redis.GetInstanceRequest, + dict, ]) -def test_get_location_rest(request_type): +def test_get_instance_rest_call_success(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="rest" ) - request_init = {'name': 'projects/sample1/locations/sample2'} + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} request = request_type(**request_init) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() + return_value = cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + ) # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
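+    # Each field is checked against the value designated above, confirming
+    # the JSON payload round-tripped through the proto-plus type intact.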
+ assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = cloud_redis.Instance.to_json(cloud_redis.Instance()) + req.return_value.content = return_value + + request = cloud_redis.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.Instance() + post_with_metadata.return_value = cloud_redis.Instance(), metadata + + client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() 
+ post_with_metadata.assert_called_once() + + +def test_get_instance_auth_string_rest_bad_request(request_type=cloud_redis.GetInstanceAuthStringRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_instance_auth_string(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceAuthStringRequest, + dict, +]) +def test_get_instance_auth_string_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.InstanceAuthString.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_instance_auth_string(request) + + # Establish that the response is the type that we expect. 
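+    # Unlike the LRO methods, this RPC returns its message directly, so the
+    # mocked JSON deserializes straight into an InstanceAuthString.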
+    assert isinstance(response, cloud_redis.InstanceAuthString)
+    assert response.auth_string == 'auth_string_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_instance_auth_string_rest_interceptors(null_interceptor):
+    transport = transports.CloudRedisRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
+    )
+    client = CloudRedisClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_auth_string") as post, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_auth_string_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance_auth_string") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cloud_redis.GetInstanceAuthStringRequest.pb(cloud_redis.GetInstanceAuthStringRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = cloud_redis.InstanceAuthString.to_json(cloud_redis.InstanceAuthString())
+        req.return_value.content = return_value
+
+        request = cloud_redis.GetInstanceAuthStringRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = cloud_redis.InstanceAuthString()
+        post_with_metadata.return_value = cloud_redis.InstanceAuthString(), metadata
+
+        client.get_instance_auth_string(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_create_instance_rest_bad_request(request_type=cloud_redis.CreateInstanceRequest):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
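+    # A 400 status with an empty JSON body is enough for api-core to raise
+    # core_exceptions.BadRequest; no response payload is needed.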
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_instance(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.CreateInstanceRequest, + dict, +]) +def test_create_instance_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis.CreateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
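+        # proto-plus message classes expose a `meta.fields` mapping, while
+        # vanilla protobuf classes expose `DESCRIPTOR.fields`; the branch below
+        # keys off the DESCRIPTOR attribute to tell the two apart.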
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_instance(request) + + # Establish that the response is the type that we expect. 
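+    # create_instance is a long-running method, so the REST client returns an
+    # api-core operation future wrapping the Operation parsed from the body.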
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_instance_rest_interceptors(null_interceptor):
+    transport = transports.CloudRedisRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
+    )
+    client = CloudRedisClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance") as post, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "pre_create_instance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = cloud_redis.CreateInstanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_update_instance_rest_bad_request(request_type=cloud_redis.UpdateInstanceRequest):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_instance(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpdateInstanceRequest, + dict, +]) +def test_update_instance_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis.UpdateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_instance(request) + + # Establish that the response is the type that we expect. 
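+    # As with create_instance, update_instance resolves to an api-core
+    # operation future on the REST transport.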
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_instance_rest_interceptors(null_interceptor):
+    transport = transports.CloudRedisRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
+    )
+    client = CloudRedisClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance") as post, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "pre_update_instance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = cloud_redis.UpdateInstanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_upgrade_instance_rest_bad_request(request_type=cloud_redis.UpgradeInstanceRequest):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.upgrade_instance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    cloud_redis.UpgradeInstanceRequest,
+    dict,
+])
+def test_upgrade_instance_rest_call_success(request_type):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
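+        # 'operations/spam' is an arbitrary operation name; any well-formed
+        # Operation message satisfies the client-side JSON parsing.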
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.upgrade_instance(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_upgrade_instance_rest_interceptors(null_interceptor):
+    transport = transports.CloudRedisRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
+    )
+    client = CloudRedisClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_upgrade_instance") as post, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_upgrade_instance_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "pre_upgrade_instance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cloud_redis.UpgradeInstanceRequest.pb(cloud_redis.UpgradeInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = cloud_redis.UpgradeInstanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.upgrade_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_import_instance_rest_bad_request(request_type=cloud_redis.ImportInstanceRequest):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.import_instance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    cloud_redis.ImportInstanceRequest,
+    dict,
+])
+def test_import_instance_rest_call_success(request_type):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.import_instance(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_import_instance_rest_interceptors(null_interceptor):
+    transport = transports.CloudRedisRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
+    )
+    client = CloudRedisClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_import_instance") as post, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_import_instance_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "pre_import_instance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cloud_redis.ImportInstanceRequest.pb(cloud_redis.ImportInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = cloud_redis.ImportInstanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.import_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_export_instance_rest_bad_request(request_type=cloud_redis.ExportInstanceRequest):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.export_instance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    cloud_redis.ExportInstanceRequest,
+    dict,
+])
+def test_export_instance_rest_call_success(request_type):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.export_instance(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_export_instance_rest_interceptors(null_interceptor):
+    transport = transports.CloudRedisRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
+    )
+    client = CloudRedisClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_export_instance") as post, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_export_instance_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "pre_export_instance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cloud_redis.ExportInstanceRequest.pb(cloud_redis.ExportInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = cloud_redis.ExportInstanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.export_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_failover_instance_rest_bad_request(request_type=cloud_redis.FailoverInstanceRequest):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.failover_instance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    cloud_redis.FailoverInstanceRequest,
+    dict,
+])
+def test_failover_instance_rest_call_success(request_type):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.failover_instance(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_failover_instance_rest_interceptors(null_interceptor):
+    transport = transports.CloudRedisRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
+    )
+    client = CloudRedisClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_failover_instance") as post, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_failover_instance_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "pre_failover_instance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cloud_redis.FailoverInstanceRequest.pb(cloud_redis.FailoverInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = cloud_redis.FailoverInstanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.failover_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_delete_instance_rest_bad_request(request_type=cloud_redis.DeleteInstanceRequest):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_instance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    cloud_redis.DeleteInstanceRequest,
+    dict,
+])
+def test_delete_instance_rest_call_success(request_type):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.delete_instance(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_instance_rest_interceptors(null_interceptor):
+    transport = transports.CloudRedisRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
+    )
+    client = CloudRedisClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance") as post, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "pre_delete_instance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = cloud_redis.DeleteInstanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_reschedule_maintenance_rest_bad_request(request_type=cloud_redis.RescheduleMaintenanceRequest):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.reschedule_maintenance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    cloud_redis.RescheduleMaintenanceRequest,
+    dict,
+])
+def test_reschedule_maintenance_rest_call_success(request_type):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.reschedule_maintenance(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_reschedule_maintenance_rest_interceptors(null_interceptor):
+    transport = transports.CloudRedisRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
+    )
+    client = CloudRedisClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_reschedule_maintenance") as post, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "post_reschedule_maintenance_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CloudRedisRestInterceptor, "pre_reschedule_maintenance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cloud_redis.RescheduleMaintenanceRequest.pb(cloud_redis.RescheduleMaintenanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = cloud_redis.RescheduleMaintenanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.reschedule_maintenance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type()
+    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.get_location(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    locations_pb2.GetLocationRequest,
+    dict,
+])
+def test_get_location_rest(request_type):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    request_init = {'name': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Designate an appropriate value for the returned response.
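+        # An empty Location message is sufficient here; the test only checks
+        # the response type, not its contents.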
+ return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. 
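+    # delete_operation maps to google.protobuf.Empty on the wire, which the
+    # client surfaces as None.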
+ assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_wait_operation_rest_bad_request(request_type=operations_pb2.WaitOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.wait_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.WaitOperationRequest, + dict, +]) +def test_wait_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.wait_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_initialize_client_w_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instances_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_get_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_auth_string_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + client.get_instance_auth_string(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceAuthStringRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + client.update_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_upgrade_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + client.upgrade_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpgradeInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
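+# Because the transport-level method wrapper is patched below, no HTTP
+# request is ever issued; the test only verifies that a default request
+# message is synthesized when `request=None`.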
+def test_import_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + client.import_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ImportInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + client.export_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ExportInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_failover_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + client.failover_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.FailoverInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.DeleteInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_reschedule_maintenance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + client.reschedule_maintenance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.RescheduleMaintenanceRequest() + + assert args[0] == request_msg + + +def test_cloud_redis_rest_lro_client(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. 
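+ # The transport builds the operations client on first access to the
+ # property and then caches it; the identity assertion below depends on
+ # that caching behavior.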
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.AbstractOperationsClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+def test_transport_kind_rest_asyncio():
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ transport = CloudRedisAsyncClient.get_transport_class("rest_asyncio")(
+ credentials=async_anonymous_credentials()
+ )
+ assert transport.kind == "rest_asyncio"
+
+
+@pytest.mark.asyncio
+async def test_list_instances_rest_asyncio_bad_request(request_type=cloud_redis.ListInstancesRequest):
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ client = CloudRedisAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="rest_asyncio"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/locations/sample2'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.read = mock.AsyncMock(return_value=b'{}')
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ await client.list_instances(request)
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("request_type", [
+ cloud_redis.ListInstancesRequest,
+ dict,
+])
+async def test_list_instances_rest_asyncio_call_success(request_type):
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ client = CloudRedisAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="rest_asyncio"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/locations/sample2'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = cloud_redis.ListInstancesResponse(
+ next_page_token='next_page_token_value',
+ unreachable=['unreachable_value'],
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = cloud_redis.ListInstancesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8'))
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = await client.list_instances(request)
+
+ # Establish that the response is the type that we expect.
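+ # list_instances returns an async pager wrapping the parsed
+ # ListInstancesResponse, so the response fields are asserted through the
+ # pager itself.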
+ assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_list_instances_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.ListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.ListInstancesResponse() + post_with_metadata.return_value = cloud_redis.ListInstancesResponse(), metadata + + await client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +@pytest.mark.asyncio +async def test_get_instance_rest_asyncio_bad_request(request_type=cloud_redis.GetInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
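+ # A 400 status from the mocked AsyncAuthorizedSession is surfaced by the
+ # transport as core_exceptions.BadRequest, which pytest.raises captures.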
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.get_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceRequest, + dict, +]) +async def test_get_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.get_instance(request) + + # Establish that the response is the type that we expect. 
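+ # Every field set on the designated Instance should survive the
+ # MessageToJson round trip, so each one is asserted individually below.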
+ assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_get_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_get_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = cloud_redis.Instance.to_json(cloud_redis.Instance()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.Instance() 
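+ # The *_with_metadata hook returns a (response, metadata) tuple, mirroring
+ # what the transport unpacks after the plain post hook runs.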
+ post_with_metadata.return_value = cloud_redis.Instance(), metadata + + await client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +@pytest.mark.asyncio +async def test_get_instance_auth_string_rest_asyncio_bad_request(request_type=cloud_redis.GetInstanceAuthStringRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.get_instance_auth_string(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceAuthStringRequest, + dict, +]) +async def test_get_instance_auth_string_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.InstanceAuthString.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.get_instance_auth_string(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.InstanceAuthString) + assert response.auth_string == 'auth_string_value' + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_get_instance_auth_string_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance_auth_string") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance_auth_string_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_get_instance_auth_string") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.GetInstanceAuthStringRequest.pb(cloud_redis.GetInstanceAuthStringRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = cloud_redis.InstanceAuthString.to_json(cloud_redis.InstanceAuthString()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.GetInstanceAuthStringRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.InstanceAuthString() + post_with_metadata.return_value = cloud_redis.InstanceAuthString(), metadata + + await client.get_instance_auth_string(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +@pytest.mark.asyncio +async def test_create_instance_rest_asyncio_bad_request(request_type=cloud_redis.CreateInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.create_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.CreateInstanceRequest, + dict, +]) +async def test_create_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis.CreateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
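+ # A proto-plus message exposes its fields via `meta.fields`, while a
+ # vanilla protobuf message exposes them via `DESCRIPTOR.fields`; the
+ # branch below keys off the presence of `DESCRIPTOR` to tell them apart.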
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.create_instance(request) + + # Establish that the response is the type that we expect. 
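+ # For long-running methods the parsed Operation proto is wrapped in an
+ # api-core future, so the assertion below inspects the underlying
+ # operation directly.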
+ assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("null_interceptor", [True, False])
+async def test_create_instance_rest_asyncio_interceptors(null_interceptor):
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ transport = transports.AsyncCloudRedisRestTransport(
+ credentials=async_anonymous_credentials(),
+ interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(),
+ )
+ client = CloudRedisAsyncClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_create_instance") as post, \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_create_instance") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.read = mock.AsyncMock(return_value=return_value)
+
+ request = cloud_redis.CreateInstanceRequest()
+ metadata =[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ await client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+@pytest.mark.asyncio
+async def test_update_instance_rest_asyncio_bad_request(request_type=cloud_redis.UpdateInstanceRequest):
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ client = CloudRedisAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="rest_asyncio"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.update_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpdateInstanceRequest, + dict, +]) +async def test_update_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis.UpdateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.update_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("null_interceptor", [True, False])
+async def test_update_instance_rest_asyncio_interceptors(null_interceptor):
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ transport = transports.AsyncCloudRedisRestTransport(
+ credentials=async_anonymous_credentials(),
+ interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(),
+ )
+ client = CloudRedisAsyncClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_update_instance") as post, \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_update_instance") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.read = mock.AsyncMock(return_value=return_value)
+
+ request = cloud_redis.UpdateInstanceRequest()
+ metadata =[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ await client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+@pytest.mark.asyncio
+async def test_upgrade_instance_rest_asyncio_bad_request(request_type=cloud_redis.UpgradeInstanceRequest):
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ client = CloudRedisAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="rest_asyncio"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.upgrade_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpgradeInstanceRequest, + dict, +]) +async def test_upgrade_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.upgrade_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("null_interceptor", [True, False])
+async def test_upgrade_instance_rest_asyncio_interceptors(null_interceptor):
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ transport = transports.AsyncCloudRedisRestTransport(
+ credentials=async_anonymous_credentials(),
+ interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(),
+ )
+ client = CloudRedisAsyncClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_upgrade_instance") as post, \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_upgrade_instance_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_upgrade_instance") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = cloud_redis.UpgradeInstanceRequest.pb(cloud_redis.UpgradeInstanceRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.read = mock.AsyncMock(return_value=return_value)
+
+ request = cloud_redis.UpgradeInstanceRequest()
+ metadata =[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ await client.upgrade_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+@pytest.mark.asyncio
+async def test_import_instance_rest_asyncio_bad_request(request_type=cloud_redis.ImportInstanceRequest):
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ client = CloudRedisAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="rest_asyncio"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.import_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.ImportInstanceRequest, + dict, +]) +async def test_import_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.import_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("null_interceptor", [True, False])
+async def test_import_instance_rest_asyncio_interceptors(null_interceptor):
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ transport = transports.AsyncCloudRedisRestTransport(
+ credentials=async_anonymous_credentials(),
+ interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(),
+ )
+ client = CloudRedisAsyncClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_import_instance") as post, \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_import_instance_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_import_instance") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = cloud_redis.ImportInstanceRequest.pb(cloud_redis.ImportInstanceRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.read = mock.AsyncMock(return_value=return_value)
+
+ request = cloud_redis.ImportInstanceRequest()
+ metadata =[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ await client.import_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+@pytest.mark.asyncio
+async def test_export_instance_rest_asyncio_bad_request(request_type=cloud_redis.ExportInstanceRequest):
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ client = CloudRedisAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="rest_asyncio"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.export_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.ExportInstanceRequest, + dict, +]) +async def test_export_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.export_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("null_interceptor", [True, False])
+async def test_export_instance_rest_asyncio_interceptors(null_interceptor):
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ transport = transports.AsyncCloudRedisRestTransport(
+ credentials=async_anonymous_credentials(),
+ interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(),
+ )
+ client = CloudRedisAsyncClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_export_instance") as post, \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_export_instance_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_export_instance") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = cloud_redis.ExportInstanceRequest.pb(cloud_redis.ExportInstanceRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.read = mock.AsyncMock(return_value=return_value)
+
+ request = cloud_redis.ExportInstanceRequest()
+ metadata =[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ await client.export_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+@pytest.mark.asyncio
+async def test_failover_instance_rest_asyncio_bad_request(request_type=cloud_redis.FailoverInstanceRequest):
+ if not HAS_ASYNC_REST_EXTRA:
+ pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+ client = CloudRedisAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="rest_asyncio"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.failover_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.FailoverInstanceRequest, + dict, +]) +async def test_failover_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.failover_instance(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_failover_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_failover_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_failover_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_failover_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.FailoverInstanceRequest.pb(cloud_redis.FailoverInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.FailoverInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + await client.failover_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +@pytest.mark.asyncio +async def test_delete_instance_rest_asyncio_bad_request(request_type=cloud_redis.DeleteInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.delete_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.DeleteInstanceRequest, + dict, +]) +async def test_delete_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.delete_instance(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_delete_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_delete_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_delete_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_delete_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.DeleteInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + await client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +@pytest.mark.asyncio +async def test_reschedule_maintenance_rest_asyncio_bad_request(request_type=cloud_redis.RescheduleMaintenanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.reschedule_maintenance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.RescheduleMaintenanceRequest, + dict, +]) +async def test_reschedule_maintenance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.reschedule_maintenance(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_reschedule_maintenance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_reschedule_maintenance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_reschedule_maintenance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_reschedule_maintenance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.RescheduleMaintenanceRequest.pb(cloud_redis.RescheduleMaintenanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.RescheduleMaintenanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + await client.reschedule_maintenance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +@pytest.mark.asyncio +async def test_get_location_rest_asyncio_bad_request(request_type=locations_pb2.GetLocationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.get_location(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +async def test_get_location_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +@pytest.mark.asyncio +async def test_list_locations_rest_asyncio_bad_request(request_type=locations_pb2.ListLocationsRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.list_locations(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +async def test_list_locations_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. 
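+        # The async REST transport consumes the body via `await response.read()`,
+        # so the fake response built below exposes `read` as an AsyncMock that
+        # returns the JSON-encoded proto bytes.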
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +@pytest.mark.asyncio +async def test_cancel_operation_rest_asyncio_bad_request(request_type=operations_pb2.CancelOperationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.cancel_operation(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +async def test_cancel_operation_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +@pytest.mark.asyncio +async def test_delete_operation_rest_asyncio_bad_request(request_type=operations_pb2.DeleteOperationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.delete_operation(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +async def test_delete_operation_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +@pytest.mark.asyncio +async def test_get_operation_rest_asyncio_bad_request(request_type=operations_pb2.GetOperationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.get_operation(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +async def test_get_operation_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. 
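+        # The operations mixin goes through the same AsyncAuthorizedSession as the
+        # generated RPCs, so patching `request` on the session class covers these
+        # mixin calls too.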
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +@pytest.mark.asyncio +async def test_list_operations_rest_asyncio_bad_request(request_type=operations_pb2.ListOperationsRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.list_operations(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +async def test_list_operations_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.list_operations(request) + + # Establish that the response is the type that we expect. 
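+    # Mixin methods are not wrapped in pagers: list_operations hands back the raw
+    # ListOperationsResponse proto, so an isinstance check is the right expectation.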
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + +@pytest.mark.asyncio +async def test_wait_operation_rest_asyncio_bad_request(request_type=operations_pb2.WaitOperationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.wait_operation(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.WaitOperationRequest, + dict, +]) +async def test_wait_operation_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.wait_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_initialize_client_w_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instances_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + await client.list_instances(request=None) + + # Establish that the underlying stub method was called. 
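+        # With request=None the client must still build a default request message,
+        # so the stub is compared against a freshly constructed
+        # cloud_redis.ListInstancesRequest to confirm nothing unexpected leaked in.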
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + await client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_auth_string_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + await client.get_instance_auth_string(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceAuthStringRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + await client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + await client.update_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_upgrade_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + await client.upgrade_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpgradeInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_import_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + await client.import_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ImportInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_export_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + await client.export_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ExportInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_failover_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + await client.failover_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.FailoverInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + await client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.DeleteInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_reschedule_maintenance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + await client.reschedule_maintenance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.RescheduleMaintenanceRequest() + + assert args[0] == request_msg + + +def test_cloud_redis_rest_asyncio_lro_client(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AsyncOperationsRestClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_unsupported_parameter_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + options = client_options.ClientOptions(quota_project_id="octopus") + with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + client_options=options + ) + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
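+    # No explicit `transport` argument is given here, so whatever the factory
+    # selects is the default; the isinstance check below pins that default to gRPC.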
+ client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudRedisGrpcTransport, + ) + +def test_cloud_redis_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudRedisTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_cloud_redis_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.CloudRedisTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_instances', + 'get_instance', + 'get_instance_auth_string', + 'create_instance', + 'update_instance', + 'upgrade_instance', + 'import_instance', + 'export_instance', + 'failover_instance', + 'delete_instance', + 'reschedule_maintenance', + 'get_location', + 'list_locations', + 'get_operation', + 'wait_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_redis_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudRedisTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_cloud_redis_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudRedisTransport() + adc.assert_called_once() + + +def test_cloud_redis_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
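+    # Application Default Credentials are resolved through google.auth.default();
+    # patching it lets the test assert on the requested scopes and quota project
+    # without touching a real environment.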
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudRedisClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, + ], +) +def test_cloud_redis_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, + transports.CloudRedisRestTransport, + ], +) +def test_cloud_redis_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudRedisGrpcTransport, grpc_helpers), + (transports.CloudRedisGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "redis.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="redis.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) +def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
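+    # Explicit ssl_channel_credentials are passed straight through to create_channel;
+    # only when they are absent does the client_cert_source_for_mtls callback
+    # (exercised further down) come into play.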
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_cloud_redis_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.CloudRedisRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_cloud_redis_host_no_port(transport_name): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'redis.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://redis.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_cloud_redis_host_with_port(transport_name): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'redis.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://redis.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_cloud_redis_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudRedisClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudRedisClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.get_instance._session + session2 = client2.transport.get_instance._session + assert session1 != session2 + session1 = client1.transport.get_instance_auth_string._session + session2 = client2.transport.get_instance_auth_string._session + assert session1 != session2 + session1 = client1.transport.create_instance._session + session2 = client2.transport.create_instance._session + assert session1 != session2 + session1 = 
client1.transport.update_instance._session
+    session2 = client2.transport.update_instance._session
+    assert session1 != session2
+    session1 = client1.transport.upgrade_instance._session
+    session2 = client2.transport.upgrade_instance._session
+    assert session1 != session2
+    session1 = client1.transport.import_instance._session
+    session2 = client2.transport.import_instance._session
+    assert session1 != session2
+    session1 = client1.transport.export_instance._session
+    session2 = client2.transport.export_instance._session
+    assert session1 != session2
+    session1 = client1.transport.failover_instance._session
+    session2 = client2.transport.failover_instance._session
+    assert session1 != session2
+    session1 = client1.transport.delete_instance._session
+    session2 = client2.transport.delete_instance._session
+    assert session1 != session2
+    session1 = client1.transport.reschedule_maintenance._session
+    session2 = client2.transport.reschedule_maintenance._session
+    assert session1 != session2
+
+
+def test_cloud_redis_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CloudRedisGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_cloud_redis_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CloudRedisGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) +def test_cloud_redis_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred - response = client.get_location(request) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) +def test_cloud_redis_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() -def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): +def test_cloud_redis_grpc_lro_client(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport='grpc', ) - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + transport = client.transport - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) - response = client.list_locations(request) + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = CloudRedisClient( +def test_cloud_redis_grpc_lro_async_client(): + client = CloudRedisAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport='grpc_asyncio', ) + transport = client.transport - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
-        return_value = None
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = '{}'
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client

-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value

-        response = client.cancel_operation(request)
+def test_instance_path():
+    project = "squid"
+    location = "clam"
+    instance = "whelk"
+    expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, )
+    actual = CloudRedisClient.instance_path(project, location, instance)
+    assert expected == actual

-    # Establish that the response is the type that we expect.
-    assert response is None

-def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest):
-    client = CloudRedisClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
+def test_parse_instance_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "instance": "nudibranch",
+    }
+    path = CloudRedisClient.instance_path(**expected)

-    request = request_type()
-    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)
+    # Check that the path construction is reversible.
+    actual = CloudRedisClient.parse_instance_path(path)
+    assert expected == actual

-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.delete_operation(request)

+def test_common_billing_account_path():
+    billing_account = "cuttlefish"
+    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+    actual = CloudRedisClient.common_billing_account_path(billing_account)
+    assert expected == actual

-@pytest.mark.parametrize("request_type", [
-    operations_pb2.DeleteOperationRequest,
-    dict,
-])
-def test_delete_operation_rest(request_type):
-    client = CloudRedisClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'}
-    request = request_type(**request_init)
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = '{}'
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "mussel",
+    }
+    path = CloudRedisClient.common_billing_account_path(**expected)

-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
+    # Check that the path construction is reversible.
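+    # (The generated parse_* helpers recover the path segments with a
+    # regular expression, so the round trip must reproduce the original
+    # mapping.)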
+ actual = CloudRedisClient.parse_common_billing_account_path(path) + assert expected == actual - response = client.delete_operation(request) +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder, ) + actual = CloudRedisClient.common_folder_path(folder) + assert expected == actual - # Establish that the response is the type that we expect. - assert response is None -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = CloudRedisClient.common_folder_path(**expected) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CloudRedisClient.common_organization_path(organization) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = CloudRedisClient.common_organization_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_organization_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project, ) + actual = CloudRedisClient.common_project_path(project) + assert expected == actual - response = client.get_operation(request) - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = CloudRedisClient.common_project_path(**expected) -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_project_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CloudRedisClient.common_location_path(project, location) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = CloudRedisClient.common_location_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_location_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_operations(request) +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() - # Establish that the response is the type that we expect. 
-    assert isinstance(response, operations_pb2.ListOperationsResponse)
+    with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep:
+        client = CloudRedisClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep:
+        transport_class = CloudRedisClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)

 def test_delete_operation(transport: str = "grpc"):
@@ -9639,6 +12221,134 @@ async def test_cancel_operation_from_dict_async():
     )
     call.assert_called()

+def test_wait_operation(transport: str = "grpc"):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.WaitOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.wait_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+@pytest.mark.asyncio
+async def test_wait_operation_async(transport: str = "grpc_asyncio"):
+    client = CloudRedisAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.WaitOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.wait_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+def test_wait_operation_field_headers():
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.WaitOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        call.return_value = operations_pb2.Operation()
+
+        client.wait_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
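+    # (The client derives x-goog-request-params from the request's `name`
+    # field; the backend uses this metadata entry to route the call.)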
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_wait_operation_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.WaitOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_wait_operation_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.wait_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_wait_operation_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
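+        # (FakeUnaryUnaryCall wraps the message in an awaitable so the
+        # mocked stub behaves like a real grpc.aio unary-unary call.)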
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.wait_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + def test_get_operation(transport: str = "grpc"): client = CloudRedisClient( @@ -10155,21 +12865,53 @@ async def test_get_location_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() def test_client_ctx(): transports = [ diff --git a/tests/integration/redis_v1.yaml b/tests/integration/redis_v1.yaml index 47440ccdad..efb6675bbf 100644 --- a/tests/integration/redis_v1.yaml +++ b/tests/integration/redis_v1.yaml @@ -49,6 +49,9 @@ http: get: '/v1/{name=projects/*/locations/*/operations/*}' - selector: google.longrunning.Operations.ListOperations get: '/v1/{name=projects/*/locations/*}/operations' + - selector: google.longrunning.Operations.WaitOperation + post: '/v2/{name=projects/*/locations/*/operations/*}:wait' + body: '*' authentication: rules: diff --git a/tests/system/conftest.py b/tests/system/conftest.py index 7b541976fc..395467661c 100644 --- a/tests/system/conftest.py +++ b/tests/system/conftest.py @@ -24,6 +24,7 @@ from google.auth.aio import credentials as ga_credentials_async HAS_GOOGLE_AUTH_AIO = True +# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. 
 except ImportError: # pragma: NO COVER
     HAS_GOOGLE_AUTH_AIO = False
 import google.auth
@@ -37,6 +38,16 @@ import asyncio
     from google.showcase import EchoAsyncClient
     from google.showcase import IdentityAsyncClient

+    try:
+        from google.showcase_v1beta1.services.echo.transports import AsyncEchoRestTransport
+        HAS_ASYNC_REST_ECHO_TRANSPORT = True
+    except ImportError:
+        HAS_ASYNC_REST_ECHO_TRANSPORT = False
+    try:
+        from google.showcase_v1beta1.services.identity.transports import AsyncIdentityRestTransport
+        HAS_ASYNC_REST_IDENTITY_TRANSPORT = True
+    except ImportError:
+        HAS_ASYNC_REST_IDENTITY_TRANSPORT = False

     # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
     # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
@@ -57,23 +68,29 @@ def async_anonymous_credentials():
     def event_loop():
         return asyncio.get_event_loop()

-    @pytest.fixture
-    def async_echo(use_mtls, event_loop):
+    @pytest.fixture(params=["grpc_asyncio", "rest_asyncio"])
+    def async_echo(use_mtls, request, event_loop):
+        transport = request.param
+        if transport == "rest_asyncio" and not HAS_ASYNC_REST_ECHO_TRANSPORT:
+            pytest.skip("Skipping test with async rest.")
         return construct_client(
             EchoAsyncClient,
             use_mtls,
-            transport_name="grpc_asyncio",
-            channel_creator=aio.insecure_channel,
+            transport_name=transport,
+            channel_creator=aio.insecure_channel if request.param == "grpc_asyncio" else None,
             credentials=async_anonymous_credentials(),
         )

-    @pytest.fixture
-    def async_identity(use_mtls, event_loop):
+    @pytest.fixture(params=["grpc_asyncio", "rest_asyncio"])
+    def async_identity(use_mtls, request, event_loop):
+        transport = request.param
+        if transport == "rest_asyncio" and not HAS_ASYNC_REST_IDENTITY_TRANSPORT:
+            pytest.skip("Skipping test with async rest.")
         return construct_client(
             IdentityAsyncClient,
             use_mtls,
-            transport_name="grpc_asyncio",
-            channel_creator=aio.insecure_channel,
+            transport_name=transport,
+            channel_creator=aio.insecure_channel if request.param == "grpc_asyncio" else None,
             credentials=async_anonymous_credentials(),
         )

@@ -135,7 +152,7 @@ def construct_client(
             credentials=credentials,
             channel=channel_creator(transport_endpoint),
         )
-    elif transport_name == "rest":
+    elif transport_name in ["rest", "rest_asyncio"]:
         # The custom host explicitly bypasses https.
transport = transport_cls( credentials=credentials, diff --git a/tests/system/test_client_context_manager.py b/tests/system/test_client_context_manager.py index 0d20292dc6..541de4c5b9 100644 --- a/tests/system/test_client_context_manager.py +++ b/tests/system/test_client_context_manager.py @@ -15,6 +15,7 @@ import os import pytest import grpc +from google.auth import exceptions def test_client(echo): @@ -50,7 +51,7 @@ async def test_client_async(async_echo): @pytest.mark.asyncio async def test_client_destroyed_async(async_echo): await async_echo.__aexit__(None, None, None) - with pytest.raises(grpc._cython.cygrpc.UsageError): + with pytest.raises((grpc._cython.cygrpc.UsageError, exceptions.TransportError)): await async_echo.echo({ 'content': 'hello' }) diff --git a/tests/system/test_lro.py b/tests/system/test_lro.py index 8098519d9e..99bbba007c 100644 --- a/tests/system/test_lro.py +++ b/tests/system/test_lro.py @@ -20,10 +20,6 @@ def test_lro(echo): - if isinstance(echo.transport, type(echo).get_transport_class("rest")): - # (TODO: dovs) Temporarily disabling rest - return - future = echo.wait({ 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), 'success': { @@ -39,6 +35,7 @@ def test_lro(echo): @pytest.mark.asyncio async def test_lro_async(async_echo): + future = await async_echo.wait({ 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), 'success': { diff --git a/tests/system/test_mixins.py b/tests/system/test_mixins.py new file mode 100644 index 0000000000..87a926e72d --- /dev/null +++ b/tests/system/test_mixins.py @@ -0,0 +1,158 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
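+#
+# NOTE: These system tests exercise the generated mixin methods
+# (long-running operations, IAM, and locations) against the Showcase
+# server, which answers with canned values such as "a/pending/thing".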
+
+import os
+import pytest
+
+from google.api_core import exceptions
+
+
+def test_get_operation(echo):
+    with pytest.raises(exceptions.NotFound):
+        echo.get_operation({"name": "operations/**"})
+
+
+def test_list_operations(echo):
+    response = echo.list_operations({"name": "operations/name"})
+    assert response.operations[0].name == "a/pending/thing"
+
+
+def test_delete_operation(echo):
+    response = echo.delete_operation({"name": "operations/name"})
+    assert response is None
+
+
+def test_cancel_operation(echo):
+    response = echo.cancel_operation({"name": "operations/name"})
+    assert response is None
+
+
+def test_set_iam_policy(echo):
+    policy = echo.set_iam_policy(
+        {"resource": "users/user", "policy": {"version": 20240919}}
+    )
+    assert policy.version == 20240919
+
+
+def test_get_iam_policy(echo):
+    # First we need to set a policy, before we can get it
+    echo.set_iam_policy(
+        {"resource": "users/user", "policy": {"version": 20240920}}
+    )
+    policy = echo.get_iam_policy(
+        {
+            "resource": "users/user",
+        }
+    )
+    assert policy.version == 20240920
+
+
+def test_test_iam_permissions(echo):
+    # First we need to set a policy, before we can call test_iam_permissions
+    echo.set_iam_policy(
+        {"resource": "users/user", "policy": {"version": 20240920}}
+    )
+    response = echo.test_iam_permissions(
+        {"resource": "users/user", "permissions": ["test_some_permission"]}
+    )
+    assert response.permissions == ["test_some_permission"]
+
+
+def test_get_location(echo):
+    response = echo.get_location(
+        {
+            "name": "projects/some_project/locations/some_location",
+        }
+    )
+    assert response.name == "projects/some_project/locations/some_location"
+
+
+def test_list_locations(echo):
+    response = echo.list_locations(
+        {
+            "name": "projects/some_project",
+        }
+    )
+    assert response.locations[0].name == "projects/some_project/locations/us-north"
+
+
+if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true":
+
+    @pytest.mark.asyncio
+    async def test_get_operation_async(async_echo):
+        with pytest.raises(exceptions.NotFound):
+            await async_echo.get_operation({"name": "operations/**"})
+
+    @pytest.mark.asyncio
+    async def test_list_operations_async(async_echo):
+        response = await async_echo.list_operations({"name": "operations/name"})
+        assert response.operations[0].name == "a/pending/thing"
+
+    @pytest.mark.asyncio
+    async def test_delete_operation_async(async_echo):
+        await async_echo.delete_operation({"name": "operations/name"})
+
+    @pytest.mark.asyncio
+    async def test_cancel_operation_async(async_echo):
+        await async_echo.cancel_operation({"name": "operations/name"})
+
+    @pytest.mark.asyncio
+    async def test_set_iam_policy_async(async_echo):
+        policy = await async_echo.set_iam_policy(
+            {"resource": "users/user", "policy": {"version": 20240919}}
+        )
+        assert policy.version == 20240919
+
+    @pytest.mark.asyncio
+    async def test_get_iam_policy_async(async_echo):
+        # First we need to set a policy, before we can get it
+        await async_echo.set_iam_policy(
+            {"resource": "users/user", "policy": {"version": 20240920}}
+        )
+        policy = await async_echo.get_iam_policy(
+            {
+                "resource": "users/user",
+            }
+        )
+        assert policy.version == 20240920
+
+    @pytest.mark.asyncio
+    async def test_test_iam_permissions_async(async_echo):
+        # First we need to set a policy, before we can call test_iam_permissions
+        await async_echo.set_iam_policy(
+            {"resource": "users/user", "policy": {"version": 20240920}}
+        )
+
+        response = await async_echo.test_iam_permissions(
+            {"resource": "users/user", "permissions": ["test_some_permission"]}
+        )
+        assert response.permissions == ["test_some_permission"]
+
+    @pytest.mark.asyncio
+    async def test_get_location_async(async_echo):
+        response = await async_echo.get_location(
+            {
+                "name": "projects/some_project/locations/some_location",
+            }
+        )
+        assert response.name == "projects/some_project/locations/some_location"
+
+    @pytest.mark.asyncio
+    async def test_list_locations_async(async_echo):
+        response = await async_echo.list_locations(
+            {
+                "name": "projects/some_project",
+            }
+        )
+        assert response.locations[0].name == "projects/some_project/locations/us-north"
diff --git a/tests/system/test_request_metadata.py b/tests/system/test_request_metadata.py
new file mode 100644
index 0000000000..efc230f823
--- /dev/null
+++ b/tests/system/test_request_metadata.py
@@ -0,0 +1,54 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from google import showcase
+
+
+def test_metadata_string(echo):
+    echo.echo(
+        showcase.EchoRequest(
+            content="The hail in Wales falls mainly on the snails.",
+            request_id="some_value",
+            other_request_id="",
+        ),
+        metadata=[('some-key', 'some_value')]
+    )
+
+
+def test_metadata_binary(echo):
+    echo.echo(
+        showcase.EchoRequest(
+            content="The hail in Wales falls mainly on the snails.",
+            request_id="some_value",
+            other_request_id="",
+        ),
+        metadata=[('some-key-bin', b'some_value')]
+    )
+
+    if isinstance(echo.transport, type(echo).get_transport_class("grpc")):
+        # See https://github.com/googleapis/gapic-generator-python/issues/2250
+        # and https://github.com/grpc/grpc/pull/38127.
+        # When the metadata key ends in `-bin`, the value should be of type
+        # `bytes` rather than `str`. Otherwise, gRPC raises a TypeError.
+        with pytest.raises(TypeError, match="(?i)expected bytes"):
+            echo.echo(
+                showcase.EchoRequest(
+                    content="The hail in Wales falls mainly on the snails.",
+                    request_id="some_value",
+                    other_request_id="",
+                ),
+                metadata=[('some-key-bin', 'some_value')]
+            )
diff --git a/tests/system/test_streams.py b/tests/system/test_streams.py
index aa8c84c84c..b4adc6ee51 100644
--- a/tests/system/test_streams.py
+++ b/tests/system/test_streams.py
@@ -115,40 +115,54 @@ def test_stream_stream_passing_dict(echo):
 @pytest.mark.asyncio
 async def test_async_unary_stream_reader(async_echo):
     content = 'The hail in Wales falls mainly on the snails.'
-    call = await async_echo.expand({
+    stream = await async_echo.expand({
         'content': content,
     }, metadata=_METADATA)
+    # Note: gRPC exposes `read`, REST exposes `__anext__` to read
+    # a chunk of response from the stream.
+    response_attr = '__anext__' if "rest" in str(
+        async_echo.transport).lower() else 'read'
+
     # Consume the response and ensure it matches what we expect.
-    # with pytest.raises(exceptions.NotFound) as exc:
     for ground_truth in content.split(' '):
-        response = await call.read()
+        response = await getattr(stream, response_attr)()
         assert response.content == ground_truth
     assert ground_truth == 'snails.'
- trailing_metadata = await call.trailing_metadata() - assert _METADATA[0] in trailing_metadata.items() + # Note: trailing metadata is part of a gRPC response. + if "grpc" in str(async_echo.transport).lower(): + trailing_metadata = await stream.trailing_metadata() + assert _METADATA[0] in trailing_metadata.items() @pytest.mark.asyncio async def test_async_unary_stream_async_generator(async_echo): content = 'The hail in Wales falls mainly on the snails.' - call = await async_echo.expand({ + stream = await async_echo.expand({ 'content': content, }, metadata=_METADATA) # Consume the response and ensure it matches what we expect. - # with pytest.raises(exceptions.NotFound) as exc: tokens = iter(content.split(' ')) - async for response in call: + async for response in stream: ground_truth = next(tokens) assert response.content == ground_truth assert ground_truth == 'snails.' - trailing_metadata = await call.trailing_metadata() - assert _METADATA[0] in trailing_metadata.items() + # Note: trailing metadata is part of a gRPC response. + if "grpc" in str(async_echo.transport).lower(): + trailing_metadata = await stream.trailing_metadata() + assert _METADATA[0] in trailing_metadata.items() @pytest.mark.asyncio async def test_async_stream_unary_iterable(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. + if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.collect() + return + requests = [] requests.append(showcase.EchoRequest(content="hello")) requests.append(showcase.EchoRequest(content="world!")) @@ -159,6 +173,12 @@ async def test_async_stream_unary_iterable(async_echo): @pytest.mark.asyncio async def test_async_stream_unary_async_generator(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. + if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.collect() + return async def async_generator(): yield showcase.EchoRequest(content="hello") @@ -170,6 +190,12 @@ async def async_generator(): @pytest.mark.asyncio async def test_async_stream_unary_writer(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. + if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.collect() + return call = await async_echo.collect() await call.write(showcase.EchoRequest(content="hello")) await call.write(showcase.EchoRequest(content="world!")) @@ -180,6 +206,13 @@ async def test_async_stream_unary_writer(async_echo): @pytest.mark.asyncio async def test_async_stream_unary_passing_dict(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. 
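+    # (As in the client-streaming cases above, the async REST transport is
+    # expected to raise NotImplementedError, which the guard below asserts.)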
+ if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.collect() + return + requests = [{'content': 'hello'}, {'content': 'world!'}] call = await async_echo.collect(iter(requests)) response = await call @@ -187,6 +220,13 @@ async def test_async_stream_unary_passing_dict(async_echo): @pytest.mark.asyncio async def test_async_stream_stream_reader_writier(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. + if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.chat(metadata=_METADATA) + return + call = await async_echo.chat(metadata=_METADATA) await call.write(showcase.EchoRequest(content="hello")) await call.write(showcase.EchoRequest(content="world!")) @@ -203,6 +243,12 @@ async def test_async_stream_stream_reader_writier(async_echo): @pytest.mark.asyncio async def test_async_stream_stream_async_generator(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. + if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.chat(metadata=_METADATA) + return async def async_generator(): yield showcase.EchoRequest(content="hello") @@ -220,6 +266,13 @@ async def async_generator(): @pytest.mark.asyncio async def test_async_stream_stream_passing_dict(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. + if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.chat(metadata=_METADATA) + return + requests = [{'content': 'hello'}, {'content': 'world!'}] call = await async_echo.chat(iter(requests), metadata=_METADATA) diff --git a/tests/system/test_unary.py b/tests/system/test_unary.py index 59f0ad1c5c..674919eb64 100644 --- a/tests/system/test_unary.py +++ b/tests/system/test_unary.py @@ -148,13 +148,17 @@ async def test_async_unary_with_dict(async_echo): @pytest.mark.asyncio async def test_async_unary_error(async_echo): message = "Bad things! Bad things!" 
- with pytest.raises(exceptions.InvalidArgument) as exc: + expected_err_message = message if "grpc_asyncio" in str( + async_echo.transport) else f"POST http://localhost:7469/v1beta1/echo:echo: {message}" + # Note: InvalidArgument is from gRPC, BadRequest from http (no MTLS) + with pytest.raises((exceptions.InvalidArgument, exceptions.BadRequest)) as exc: await async_echo.echo( { "error": { - "code": code_pb2.Code.Value("INVALID_ARGUMENT"), + "code": code_pb2.Code.Value("INVALID_ARGUMENT",), "message": message, }, } ) - assert exc.value.message == message + assert exc.value.code == 400 + assert exc.value.message == expected_err_message diff --git a/tests/system/test_universe_domain.py b/tests/system/test_universe_domain.py index 9690085876..df8c197305 100644 --- a/tests/system/test_universe_domain.py +++ b/tests/system/test_universe_domain.py @@ -73,9 +73,9 @@ def test_universe_domain_validation_fail(parametrized_echo, channel_creator, tra # Make this test unconditional once the minimum supported version of # google-auth becomes 2.23.0 or higher. google_auth_major, google_auth_minor, _ = [ - int(part) for part in google.auth.__version__.split(".") + part for part in google.auth.__version__.split(".") ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + if int(google_auth_major) > 2 or (int(google_auth_major) == 2 and int(google_auth_minor) >= 23): assert parametrized_echo.transport._credentials.universe_domain == credential_universe if transport_name == "rest": assert parametrized_echo.api_endpoint == "http://" + transport_endpoint @@ -84,9 +84,11 @@ def test_universe_domain_validation_fail(parametrized_echo, channel_creator, tra assert parametrized_echo.api_endpoint == "localhost:7469" else: assert parametrized_echo.api_endpoint == transport_endpoint - with pytest.raises(ValueError) as err: - parametrized_echo.echo({ - 'content': 'Universe validation failed!' - }) - assert str( - err.value) == f"The configured universe domain ({client_universe}) does not match the universe domain found in the credentials ({credential_universe}). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # NOTE (b/349488459): universe validation is disabled until further notice. + # with pytest.raises(ValueError) as err: + # parametrized_echo.echo({ + # 'content': 'Universe validation failed!' + # }) + # assert str( + # err.value) == f"The configured universe domain ({client_universe}) does not match the universe domain found in the credentials ({credential_universe}). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
diff --git a/tests/unit/samplegen/test_integration.py b/tests/unit/samplegen/test_integration.py index 39e8ec0956..b6c7e1dad7 100644 --- a/tests/unit/samplegen/test_integration.py +++ b/tests/unit/samplegen/test_integration.py @@ -160,7 +160,10 @@ def test_generate_sample_basic(): {'type': 'molluscs_v1.ClassifyTarget', 'name': 'classify_target'}, {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, {'type': 'float', 'name': 'timeout'}, - {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + { + 'type': 'Sequence[Tuple[str, Union[str, bytes]]]', + 'name': 'metadata' + } ], 'resultType': 'molluscs_v1.classification', 'client': { @@ -284,7 +287,10 @@ def test_generate_sample_basic_async(): {'type': 'molluscs_v1.ClassifyTarget', 'name': 'classify_target'}, {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, {'type': 'float', 'name': 'timeout'}, - {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + { + 'type': 'Sequence[Tuple[str, Union[str, bytes]]]', + 'name': 'metadata' + } ], 'resultType': 'molluscs_v1.classification', 'client': { @@ -399,7 +405,10 @@ def test_generate_sample_basic_unflattenable(): {'type': 'molluscs_v1.classify_request', 'name': 'request'}, {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, {'type': 'float', 'name': 'timeout'}, - {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + { + 'type': 'Sequence[Tuple[str, Union[str, bytes]]]', + 'name': 'metadata' + } ], 'resultType': 'molluscs_v1.classification', 'client': { @@ -505,7 +514,10 @@ def test_generate_sample_void_method(): {'type': 'molluscs_v1.ClassifyTarget', 'name': 'classify_target'}, {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, {'type': 'float', 'name': 'timeout'}, - {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + { + 'type': 'Sequence[Tuple[str, Union[str, bytes]]]', + 'name': 'metadata' + } ], 'client': { 'shortName': 'MolluscServiceClient', diff --git a/tests/unit/samplegen/test_samplegen.py b/tests/unit/samplegen/test_samplegen.py index 7665c831ae..d9714c280c 100644 --- a/tests/unit/samplegen/test_samplegen.py +++ b/tests/unit/samplegen/test_samplegen.py @@ -2301,7 +2301,10 @@ def test__set_sample_metadata_server_streaming(): }, {"type": "google.api_core.retry.Retry", "name": "retry"}, {"type": "float", "name": "timeout"}, - {"type": "Sequence[Tuple[str, str]", "name": "metadata"}, + { + "type": "Sequence[Tuple[str, Union[str, bytes]]]", + "name": "metadata" + }, ], "resultType": "Iterable[animalia.mollusca_v1.types.Mollusc]", "client": { @@ -2385,7 +2388,10 @@ def test__set_sample_metadata_client_streaming(): }, {"type": "google.api_core.retry.Retry", "name": "retry"}, {"type": "float", "name": "timeout"}, - {"type": "Sequence[Tuple[str, str]", "name": "metadata"}, + { + "type": "Sequence[Tuple[str, Union[str, bytes]]]", + "name": "metadata" + }, ], "resultType": "animalia.mollusca_v1.types.Mollusc", "client": { diff --git a/tests/unit/schema/test_api.py b/tests/unit/schema/test_api.py index c8bf9706d3..afe9b1434f 100644 --- a/tests/unit/schema/test_api.py +++ b/tests/unit/schema/test_api.py @@ -2741,6 +2741,35 @@ def test_read_empty_python_settings_from_service_yaml(): == client_pb2.PythonSettings.ExperimentalFeatures() assert api_schema.all_library_settings["google.example.v1beta1"].python_settings.experimental_features.rest_async_io_enabled \ == False + assert api_schema.all_library_settings[api_schema.naming.proto_package].python_settings \ + == client_pb2.PythonSettings() + + +def test_incorrect_library_settings_version(): + # NOTE: This test 
case ensures that the generator is able to read + # from the default library settings if the version specified against the + # library settings in the service yaml of an API differs from the version + # of the API. + service_yaml_config = { + "apis": [ + {"name": "google.example.v1beta1.ServiceOne.Example1"}, + ], + "publishing": { + "library_settings": [ + { + "version": "google.example.v1", + "python_settings": { + "experimental_features": {"rest_async_io_enabled": True}, + }, + } + ] + }, + } + cli_options = Options(service_yaml_config=service_yaml_config) + fd = get_file_descriptor_proto_for_tests(fields=[]) + api_schema = api.API.build(fd, "google.example.v1beta1", opts=cli_options) + assert api_schema.all_library_settings[api_schema.naming.proto_package].python_settings \ + == client_pb2.PythonSettings() def test_python_settings_duplicate_version_raises_error(): diff --git a/tests/unit/schema/test_metadata.py b/tests/unit/schema/test_metadata.py index df8967da71..3189b3a9fd 100644 --- a/tests/unit/schema/test_metadata.py +++ b/tests/unit/schema/test_metadata.py @@ -97,6 +97,12 @@ def test_proto_package_version_parsing(): addr = metadata.Address(package=("bah", "v20p1", "baj", "v3")) assert addr.convert_to_versioned_package() == ("bah", "v20p1", "baj_v3") + addr = metadata.Address(package=("bab", "v1")) + assert addr.proto_package_versioned == "bab_v1" + + addr = metadata.Address(package=("bah", "v20p1", "baj", "v3")) + assert addr.proto_package_versioned == "bah.v20p1.baj_v3" + def test_address_child_no_parent(): addr = metadata.Address(package=('foo', 'bar'), module='baz') diff --git a/tests/unit/schema/wrappers/test_routing.py b/tests/unit/schema/wrappers/test_routing.py index f93d6680a0..f4e6215bca 100644 --- a/tests/unit/schema/wrappers/test_routing.py +++ b/tests/unit/schema/wrappers/test_routing.py @@ -14,6 +14,7 @@ from gapic.schema import wrappers +import json import proto import pytest @@ -23,31 +24,6 @@ class RoutingTestRequest(proto.Message): app_profile_id = proto.Field(proto.STRING, number=2) -def resolve(rule, request): - """This function performs dynamic header resolution, identical to what's in client.py.j2.""" - - def _get_field(request, field_path: str): - segments = field_path.split(".") - cur = request - for x in segments: - cur = getattr(cur, x) - return cur - - header_params = {} - for routing_param in rule.routing_parameters: - # This may raise exception (which we show to clients). 
- request_field_value = _get_field(request, routing_param.field) - if routing_param.path_template: - routing_param_regex = routing_param.to_regex() - regex_match = routing_param_regex.match(request_field_value) - if regex_match: - header_params[routing_param.key] = regex_match.group( - routing_param.key) - else: # No need to match - header_params[routing_param.key] = request_field_value - return header_params - - @pytest.mark.parametrize( "req, expected", [ @@ -63,7 +39,10 @@ def _get_field(request, field_path: str): def test_routing_rule_resolve_simple_extraction(req, expected): rule = wrappers.RoutingRule( [wrappers.RoutingParameter("app_profile_id", "")]) - assert resolve(rule, req) == expected + assert wrappers.RoutingRule.resolve( + rule, + RoutingTestRequest.to_dict(req) + ) == expected @pytest.mark.parametrize( @@ -82,7 +61,10 @@ def test_routing_rule_resolve_rename_extraction(req, expected): rule = wrappers.RoutingRule( [wrappers.RoutingParameter("app_profile_id", "{routing_id=**}")] ) - assert resolve(rule, req) == expected + assert wrappers.RoutingRule.resolve( + rule, + RoutingTestRequest.to_dict(req) + ) == expected @pytest.mark.parametrize( @@ -111,7 +93,10 @@ def test_routing_rule_resolve_field_match(req, expected): ), ] ) - assert resolve(rule, req) == expected + assert wrappers.RoutingRule.resolve( + rule, + RoutingTestRequest.to_dict(req) + ) == expected @pytest.mark.parametrize( @@ -135,6 +120,9 @@ def test_routing_rule_resolve_field_match(req, expected): wrappers.RoutingParameter( "table_name", "projects/*/{instance_id=instances/*}/**" ), + wrappers.RoutingParameter( + "doesnotexist", "projects/*/{instance_id=instances/*}/**" + ), ], RoutingTestRequest( table_name="projects/100/instances/200/tables/300"), @@ -144,7 +132,15 @@ def test_routing_rule_resolve_field_match(req, expected): ) def test_routing_rule_resolve(routing_parameters, req, expected): rule = wrappers.RoutingRule(routing_parameters) - got = resolve(rule, req) + got = wrappers.RoutingRule.resolve( + rule, RoutingTestRequest.to_dict(req) + ) + assert got == expected + + rule = wrappers.RoutingRule(routing_parameters) + got = wrappers.RoutingRule.resolve( + rule, json.dumps(RoutingTestRequest.to_dict(req)) + ) assert got == expected
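+    # (RoutingRule.resolve accepts the request either as a dict or as its
+    # JSON-serialized string; both forms above must yield identical headers.)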